| 2023.01.01 | 新增GCP BigQuery/Dataflow JJS/QlikSense BI模板。

This commit is contained in:
Moshow郑锴
2023-01-01 16:26:10 +08:00
parent 107c1035e5
commit bc123b8f27
8 changed files with 235 additions and 61 deletions

View File

@@ -47,7 +47,7 @@ spring:
#mvc:
# static-path-pattern: /statics/**
OEM:
version: 3.2
version: 2023.1
header: SQL转Java JPA、MYBATIS实现类代码生成平台
keywords: sql转实体类,sql转DAO,SQL转service,SQL转JPA实现,SQL转MYBATIS实现
title: JAVA在线代码生成

View File

@@ -47,7 +47,7 @@ spring:
#mvc:
# static-path-pattern: /statics/**
OEM:
version: 3.2
version: 2023.1
header: SQL转Java JPA、MYBATIS实现类代码生成平台
keywords: sql转实体类,sql转DAO,SQL转service,SQL转JPA实现,SQL转MYBATIS实现
title: JAVA代码生成平台

View File

@@ -47,7 +47,7 @@ spring:
#mvc:
# static-path-pattern: /statics/**
OEM:
version: 3.2
version: 2023.1
header: SQL转Java JPA、MYBATIS实现类代码生成平台
keywords: sql转实体类,sql转DAO,SQL转service,SQL转JPA实现,SQL转MYBATIS实现
title: JAVA在线代码生成

View File

@@ -230,7 +230,6 @@
}
]
},
{
"group": "jpa-starp",
"templates": [{
@@ -249,5 +248,28 @@
"description": "jpacontroller"
}
]
},
{
"group": "bi",
"templates": [{
"id": "201",
"name": "qliksense",
"description": "qlik sense"
}]
},
{
"group": "cloud",
"templates": [
{
"id": "301",
"name": "bigquery",
"description": "GCP BigQuery"
},
{
"id": "302",
"name": "dataflowjjs",
"description": "GCP Dataflow JJS"
}
]
}
]
]

View File

@@ -0,0 +1,94 @@
//***************************
//[${classInfo.classComment} - ${classInfo.tableName}]
//AUTHOR ${authorName}
//HISTORY ${.now?string('yyyy-MM-dd')}
//***************************
//***************************
//load all
[${classInfo.tableName}]:
LOAD * FROM ['LIB://QVD/${classInfo.className}.qvd'](qvd);
//***************************
//load columns
[${classInfo.tableName}]:
LOAD
<#list classInfo.fieldList as fieldItem >
"${fieldItem.columnName}" as "${fieldItem.fieldName}"<#if fieldItem_has_next>,</#if>
</#list>
FROM
['LIB://QVD/${classInfo.className}.qvd'](qvd);
//***************************
//load inline
[${classInfo.tableName}]:
LOAD * INLINE
[
<#list classInfo.fieldList as fieldItem >${fieldItem.columnName} <#if fieldItem_has_next>,</#if></#list>
<#list classInfo.fieldList as fieldItem >${fieldItem.fieldName} <#if fieldItem_has_next>,</#if></#list>
<#list classInfo.fieldList as fieldItem >${fieldItem.fieldComment} <#if fieldItem_has_next>,</#if></#list>
];
//***************************
//load from api data connection (wrap on)
LIB CONNECT TO '${classInfo.tableName}_api';
RestConnectorMasterTable:
SQL SELECT
	"__KEY_root",
	(SELECT
<#list classInfo.fieldList as fieldItem >
		"${fieldItem.columnName}",
</#list>
		"__FK_object"
	FROM "object" FK "__FK_object")
FROM JSON (wrap on) "root" PK "__KEY_root"
// WITH CONNECTION (
//	 Url "https://localhost:8080/${classInfo.tableName}_api",
//	 QUERY "page" "1",
//	 QUERY "size" "100",
//	 HTTPHEADER "token" "123456",
//	 BODY "Post body here")
;
[${classInfo.className}]:
LOAD
<#list classInfo.fieldList as fieldItem >
	[${fieldItem.columnName}] as [${fieldItem.fieldName}],
</#list>
	[__FK_object] AS [__KEY_root]
RESIDENT RestConnectorMasterTable
//FIX: filter on the foreign key of this result table, not the "__FK_stores" field from Qlik's sample script
WHERE NOT IsNull([__FK_object]);
//FIX: only drop the temporary REST master table; dropping [${classInfo.className}] here would discard the data just loaded
DROP TABLE RestConnectorMasterTable;
//***************************
//load from api data connection (wrap off)
LIB CONNECT TO '${classInfo.tableName}_api';
[${classInfo.className}]:
SQL SELECT
<#list classInfo.fieldList as fieldItem >
	[${fieldItem.fieldName}] as [${fieldItem.fieldName}]<#if fieldItem_has_next>,</#if>
</#list>
FROM JSON(wrap off) "${classInfo.className}"
// WITH CONNECTION (
//	 Url "https://localhost:8080/${classInfo.tableName}_api",
//	 QUERY "page" "1",
//	 QUERY "size" "100",
//	 HTTPHEADER "token" "123456",
//	 BODY "Post body here")
;
//***************************
//load from sql data connection
LIB CONNECT TO '${classInfo.tableName}_db';
SQL SELECT
<#list classInfo.fieldList as fieldItem >
	[${fieldItem.columnName}] as [${fieldItem.fieldName}]<#if fieldItem_has_next>,</#if>
</#list>
FROM
	${classInfo.tableName}
WHERE
	Create_Time > '2023-01-01 00:00:00';

View File

@@ -0,0 +1,17 @@
-- FIX: BigQuery Standard SQL requires backticks around table paths;
-- single quotes make the path a string literal and the query fails.
SELECT * FROM `your_project.your_dataset.${tableName}` t
order by t.id desc
LIMIT 100
;
SELECT * FROM `your_project.your_dataset.${tableName}_error_records` t
order by t.timestamp desc
LIMIT 100
;
-- bigquery table -> SCHEMA -> Edit as text , then input below text:
-- (FIX: "type" key must be quoted — the schema editor expects valid JSON)
[
<#list classInfo.fieldList as fieldItem >
{"name":"${fieldItem.columnName}","type":"STRING","mode":"NULLABLE","description": "${fieldItem.fieldName} - ${fieldItem.fieldComment}"}<#if fieldItem_has_next>,</#if>
</#list>
]

View File

@@ -0,0 +1,38 @@
/**
* GCP - dataflow job jjs for [${classInfo.classComment} - ${classInfo.tableName}]
* AUTHOR ${authorName}
*
* User-defined function (UDF) to transform events as part of a Dataflow template job.
* upload to GCS and create dataflow job with this js file and method as 'process'
* @param {string} inJson input Pub/Sub JSON message (stringified)
* @return {string} outJson output JSON message (stringified)
*/
function process(inJson) {
//for local js debug
//var obj = JSON.parse(JSON.stringify(inJson));
//for online jjs
var obj = JSON.parse(inJson);
var includePubsubMessage = obj.data && obj.attributes;
var data = includePubsubMessage ? obj.data : obj;
//debug and show error if you need special logic
if(data.hasOwnProperty('show_error')){
throw new ERROR("show_error:"+JSON.stringify(data))
}
// INSERT CUSTOM TRANSFORMATION LOGIC HERE
var tableObj= {};
tableObj.insert_time=new Date().toUTCString()
<#list classInfo.fieldList as fieldItem >
tableObj.${fieldItem.columnName}=data.${fieldItem.fieldName}
</#list>
return JSON.stringify(tableObj);
}
//field name = field name
<#list classInfo.fieldList as fieldItem >
tableObj.${fieldItem.fieldName}=data.${fieldItem.fieldName}
</#list>
//column name = column name
<#list classInfo.fieldList as fieldItem >
tableObj.${fieldItem.columnName}=data.${fieldItem.columnName}
</#list>