<transformation>
<info>
<name>forecast_quota</name>
<description/>
<extended_description/>
<trans_version/>
<trans_type>Normal</trans_type>
<trans_status>0</trans_status>
<directory>/forecast</directory>
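<!--
  Transformation parameters: DB_HOST/DB_NAME/DB_PORT/DB_USER/DB_PASS locate the ai_sys_db
  PostgreSQL instance, DS_ID selects the source row in ai_base.ai_data_source, PYTHON_ENV
  points at the Python launcher, and TIME_DAY is the forecast reference day.
  A minimal launch sketch, assuming the standard PDI pan command and this transformation
  saved to a file named forecast_quota.ktr (file name and values are illustrative):

  pan.bat -file=forecast_quota.ktr -param:DS_ID=DS_43 -param:TIME_DAY="2025-02-25 00:00:00" -level=Basic
-->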
<parameters>
<parameter>
<name>DB_HOST</name>
<default_value>192.168.123.74</default_value>
<description/>
</parameter>
<parameter>
<name>DB_NAME</name>
<default_value>huishui_ai_dev</default_value>
<description/>
</parameter>
<parameter>
<name>DB_PASS</name>
<default_value>p@ssw0rd</default_value>
<description/>
</parameter>
<parameter>
<name>DB_PORT</name>
<default_value>5432</default_value>
<description/>
</parameter>
<parameter>
<name>DB_USER</name>
<default_value>postgres</default_value>
<description/>
</parameter>
<parameter>
<name>DS_ID</name>
<default_value>DS_43</default_value>
<description/>
</parameter>
<parameter>
<name>PYTHON_ENV</name>
<default_value>{"exec":"D:/rub/ai_python/pythonRun.bat", "path":"d:/rub/ai_python/"}</default_value>
<description/>
</parameter>
<parameter>
<name>TIME_DAY</name>
<default_value>2025-02-25 00:00:00</default_value>
<description/>
</parameter>
</parameters>
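<!-- Logging is left at the PDI defaults below: no connection, schema or table is configured
     for the transformation, performance, channel, step or metrics log tables. -->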
<log>
<trans-log-table>
<connection/>
<schema/>
<table/>
<size_limit_lines/>
<interval/>
<timeout_days/>
<field>
<id>ID_BATCH</id>
<enabled>Y</enabled>
<name>ID_BATCH</name>
</field>
<field>
<id>CHANNEL_ID</id>
<enabled>Y</enabled>
<name>CHANNEL_ID</name>
</field>
<field>
<id>TRANSNAME</id>
<enabled>Y</enabled>
<name>TRANSNAME</name>
</field>
<field>
<id>STATUS</id>
<enabled>Y</enabled>
<name>STATUS</name>
</field>
<field>
<id>LINES_READ</id>
<enabled>Y</enabled>
<name>LINES_READ</name>
<subject/>
</field>
<field>
<id>LINES_WRITTEN</id>
<enabled>Y</enabled>
<name>LINES_WRITTEN</name>
<subject/>
</field>
<field>
<id>LINES_UPDATED</id>
<enabled>Y</enabled>
<name>LINES_UPDATED</name>
<subject/>
</field>
<field>
<id>LINES_INPUT</id>
<enabled>Y</enabled>
<name>LINES_INPUT</name>
<subject/>
</field>
<field>
<id>LINES_OUTPUT</id>
<enabled>Y</enabled>
<name>LINES_OUTPUT</name>
<subject/>
</field>
<field>
<id>LINES_REJECTED</id>
<enabled>Y</enabled>
<name>LINES_REJECTED</name>
<subject/>
</field>
<field>
<id>ERRORS</id>
<enabled>Y</enabled>
<name>ERRORS</name>
</field>
<field>
<id>STARTDATE</id>
<enabled>Y</enabled>
<name>STARTDATE</name>
</field>
<field>
<id>ENDDATE</id>
<enabled>Y</enabled>
<name>ENDDATE</name>
</field>
<field>
<id>LOGDATE</id>
<enabled>Y</enabled>
<name>LOGDATE</name>
</field>
<field>
<id>DEPDATE</id>
<enabled>Y</enabled>
<name>DEPDATE</name>
</field>
<field>
<id>REPLAYDATE</id>
<enabled>Y</enabled>
<name>REPLAYDATE</name>
</field>
<field>
<id>LOG_FIELD</id>
<enabled>Y</enabled>
<name>LOG_FIELD</name>
</field>
<field>
<id>EXECUTING_SERVER</id>
<enabled>N</enabled>
<name>EXECUTING_SERVER</name>
</field>
<field>
<id>EXECUTING_USER</id>
<enabled>N</enabled>
<name>EXECUTING_USER</name>
</field>
<field>
<id>CLIENT</id>
<enabled>N</enabled>
<name>CLIENT</name>
</field>
</trans-log-table>
<perf-log-table>
<connection/>
<schema/>
<table/>
<interval/>
<timeout_days/>
<field>
<id>ID_BATCH</id>
<enabled>Y</enabled>
<name>ID_BATCH</name>
</field>
<field>
<id>SEQ_NR</id>
<enabled>Y</enabled>
<name>SEQ_NR</name>
</field>
<field>
<id>LOGDATE</id>
<enabled>Y</enabled>
<name>LOGDATE</name>
</field>
<field>
<id>TRANSNAME</id>
<enabled>Y</enabled>
<name>TRANSNAME</name>
</field>
<field>
<id>STEPNAME</id>
<enabled>Y</enabled>
<name>STEPNAME</name>
</field>
<field>
<id>STEP_COPY</id>
<enabled>Y</enabled>
<name>STEP_COPY</name>
</field>
<field>
<id>LINES_READ</id>
<enabled>Y</enabled>
<name>LINES_READ</name>
</field>
<field>
<id>LINES_WRITTEN</id>
<enabled>Y</enabled>
<name>LINES_WRITTEN</name>
</field>
<field>
<id>LINES_UPDATED</id>
<enabled>Y</enabled>
<name>LINES_UPDATED</name>
</field>
<field>
<id>LINES_INPUT</id>
<enabled>Y</enabled>
<name>LINES_INPUT</name>
</field>
<field>
<id>LINES_OUTPUT</id>
<enabled>Y</enabled>
<name>LINES_OUTPUT</name>
</field>
<field>
<id>LINES_REJECTED</id>
<enabled>Y</enabled>
<name>LINES_REJECTED</name>
</field>
<field>
<id>ERRORS</id>
<enabled>Y</enabled>
<name>ERRORS</name>
</field>
<field>
<id>INPUT_BUFFER_ROWS</id>
<enabled>Y</enabled>
<name>INPUT_BUFFER_ROWS</name>
</field>
<field>
<id>OUTPUT_BUFFER_ROWS</id>
<enabled>Y</enabled>
<name>OUTPUT_BUFFER_ROWS</name>
</field>
</perf-log-table>
<channel-log-table>
<connection/>
<schema/>
<table/>
<timeout_days/>
<field>
<id>ID_BATCH</id>
<enabled>Y</enabled>
<name>ID_BATCH</name>
</field>
<field>
<id>CHANNEL_ID</id>
<enabled>Y</enabled>
<name>CHANNEL_ID</name>
</field>
<field>
<id>LOG_DATE</id>
<enabled>Y</enabled>
<name>LOG_DATE</name>
</field>
<field>
<id>LOGGING_OBJECT_TYPE</id>
<enabled>Y</enabled>
<name>LOGGING_OBJECT_TYPE</name>
</field>
<field>
<id>OBJECT_NAME</id>
<enabled>Y</enabled>
<name>OBJECT_NAME</name>
</field>
<field>
<id>OBJECT_COPY</id>
<enabled>Y</enabled>
<name>OBJECT_COPY</name>
</field>
<field>
<id>REPOSITORY_DIRECTORY</id>
<enabled>Y</enabled>
<name>REPOSITORY_DIRECTORY</name>
</field>
<field>
<id>FILENAME</id>
<enabled>Y</enabled>
<name>FILENAME</name>
</field>
<field>
<id>OBJECT_ID</id>
<enabled>Y</enabled>
<name>OBJECT_ID</name>
</field>
<field>
<id>OBJECT_REVISION</id>
<enabled>Y</enabled>
<name>OBJECT_REVISION</name>
</field>
<field>
<id>PARENT_CHANNEL_ID</id>
<enabled>Y</enabled>
<name>PARENT_CHANNEL_ID</name>
</field>
<field>
<id>ROOT_CHANNEL_ID</id>
<enabled>Y</enabled>
<name>ROOT_CHANNEL_ID</name>
</field>
</channel-log-table>
<step-log-table>
<connection/>
<schema/>
<table/>
<timeout_days/>
<field>
<id>ID_BATCH</id>
<enabled>Y</enabled>
<name>ID_BATCH</name>
</field>
<field>
<id>CHANNEL_ID</id>
<enabled>Y</enabled>
<name>CHANNEL_ID</name>
</field>
<field>
<id>LOG_DATE</id>
<enabled>Y</enabled>
<name>LOG_DATE</name>
</field>
<field>
<id>TRANSNAME</id>
<enabled>Y</enabled>
<name>TRANSNAME</name>
</field>
<field>
<id>STEPNAME</id>
<enabled>Y</enabled>
<name>STEPNAME</name>
</field>
<field>
<id>STEP_COPY</id>
<enabled>Y</enabled>
<name>STEP_COPY</name>
</field>
<field>
<id>LINES_READ</id>
<enabled>Y</enabled>
<name>LINES_READ</name>
</field>
<field>
<id>LINES_WRITTEN</id>
<enabled>Y</enabled>
<name>LINES_WRITTEN</name>
</field>
<field>
<id>LINES_UPDATED</id>
<enabled>Y</enabled>
<name>LINES_UPDATED</name>
</field>
<field>
<id>LINES_INPUT</id>
<enabled>Y</enabled>
<name>LINES_INPUT</name>
</field>
<field>
<id>LINES_OUTPUT</id>
<enabled>Y</enabled>
<name>LINES_OUTPUT</name>
</field>
<field>
<id>LINES_REJECTED</id>
<enabled>Y</enabled>
<name>LINES_REJECTED</name>
</field>
<field>
<id>ERRORS</id>
<enabled>Y</enabled>
<name>ERRORS</name>
</field>
<field>
<id>LOG_FIELD</id>
<enabled>N</enabled>
<name>LOG_FIELD</name>
</field>
</step-log-table>
<metrics-log-table>
<connection/>
<schema/>
<table/>
<timeout_days/>
<field>
<id>ID_BATCH</id>
<enabled>Y</enabled>
<name>ID_BATCH</name>
</field>
<field>
<id>CHANNEL_ID</id>
<enabled>Y</enabled>
<name>CHANNEL_ID</name>
</field>
<field>
<id>LOG_DATE</id>
<enabled>Y</enabled>
<name>LOG_DATE</name>
</field>
<field>
<id>METRICS_DATE</id>
<enabled>Y</enabled>
<name>METRICS_DATE</name>
</field>
<field>
<id>METRICS_CODE</id>
<enabled>Y</enabled>
<name>METRICS_CODE</name>
</field>
<field>
<id>METRICS_DESCRIPTION</id>
<enabled>Y</enabled>
<name>METRICS_DESCRIPTION</name>
</field>
<field>
<id>METRICS_SUBJECT</id>
<enabled>Y</enabled>
<name>METRICS_SUBJECT</name>
</field>
<field>
<id>METRICS_TYPE</id>
<enabled>Y</enabled>
<name>METRICS_TYPE</name>
</field>
<field>
<id>METRICS_VALUE</id>
<enabled>Y</enabled>
<name>METRICS_VALUE</name>
</field>
</metrics-log-table>
</log>
<maxdate>
<connection/>
<table/>
<field/>
<offset>0.0</offset>
<maxdiff>0.0</maxdiff>
</maxdate>
<size_rowset>10000</size_rowset>
<sleep_time_empty>50</sleep_time_empty>
<sleep_time_full>50</sleep_time_full>
<unique_connections>N</unique_connections>
<feedback_shown>Y</feedback_shown>
<feedback_size>50000</feedback_size>
<using_thread_priorities>Y</using_thread_priorities>
<shared_objects_file/>
<capture_step_performance>N</capture_step_performance>
<step_performance_capturing_delay>1000</step_performance_capturing_delay>
<step_performance_capturing_size_limit>100</step_performance_capturing_size_limit>
<dependencies>
</dependencies>
<partitionschemas>
</partitionschemas>
<slaveservers>
</slaveservers>
<clusterschemas>
</clusterschemas>
<created_user>-</created_user>
<created_date>2025/02/25 12:36:17.249</created_date>
<modified_user>-</modified_user>
<modified_date>2025/02/26 18:28:31.173</modified_date>
<key_for_session_key>H4sIAAAAAAAAAAMAAAAAAAAAAAA=</key_for_session_key>
<is_key_private>N</is_key_private>
</info>
<notepads>
</notepads>
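<!-- Database connection "ai_sys_db": a PostgreSQL (HWNPOSTGRESQL) connection whose host,
     database, port and credentials are taken from the DB_* parameters defined above. -->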
<connection>
<name>ai_sys_db</name>
<server>${DB_HOST}</server>
<type>HWNPOSTGRESQL</type>
<access>Native</access>
<database>${DB_NAME}</database>
<port>${DB_PORT}</port>
<username>${DB_USER}</username>
<password>${DB_PASS}</password>
<servername/>
<data_tablespace/>
<index_tablespace/>
<attributes>
<attribute>
<code>FORCE_IDENTIFIERS_TO_LOWERCASE</code>
<attribute>N</attribute>
</attribute>
<attribute>
<code>FORCE_IDENTIFIERS_TO_UPPERCASE</code>
<attribute>N</attribute>
</attribute>
<attribute>
<code>IS_CLUSTERED</code>
<attribute>N</attribute>
</attribute>
<attribute>
<code>PORT_NUMBER</code>
<attribute>${DB_PORT}</attribute>
</attribute>
<attribute>
<code>PRESERVE_RESERVED_WORD_CASE</code>
<attribute>Y</attribute>
</attribute>
<attribute>
<code>QUOTE_ALL_FIELDS</code>
<attribute>N</attribute>
</attribute>
<attribute>
<code>SUPPORTS_BOOLEAN_DATA_TYPE</code>
<attribute>Y</attribute>
</attribute>
<attribute>
<code>SUPPORTS_TIMESTAMP_DATA_TYPE</code>
<attribute>Y</attribute>
</attribute>
<attribute>
<code>USE_POOLING</code>
<attribute>N</attribute>
</attribute>
</attributes>
</connection>
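<!-- Hop order: 表输入 (table input) feeds 生成预测数据 (generate forecast data),
     which feeds 输出结果 (write results). -->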
<order>
<hop>
<from>表输入</from>
<to>生成预测数据</to>
<enabled>Y</enabled>
</hop>
<hop>
<from>生成预测数据</from>
<to>输出结果</to>
<enabled>Y</enabled>
</hop>
</order>
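<!-- Step 生成预测数据 (generate forecast data): a scripted step that, for each incoming
     (otype, oname) row, loads the previous 14 days of raw values from the configured
     source database, hands them to the embedded Prophet Python script via temporary
     JSON files, and emits one output row per forecasted point. -->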
<step>
<name>生成预测数据</name>
<type>HSWaterScriptValuesMod</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<compatible>N</compatible>
<optimizationLevel>9</optimizationLevel>
<jsScripts>
<jsScript>
<jsScript_type>1</jsScript_type>
<jsScript_name>JS_BEGIN</jsScript_name>
<jsScript_script>
</jsScript_script>
</jsScript>
<jsScript>
<jsScript_type>0</jsScript_type>
<jsScript_name>JS_RUN</jsScript_name>
<jsScript_script>// Read the transformation variables
var pythonEnv = strToJson(getVariable("PYTHON_ENV", ""));
var pythonRoot = pythonEnv["path"] + "f_" + "aaaaa";//allocUUID();
var pythonExec = pythonEnv["exec"];
var TIME_DAY = getVariable("TIME_DAY", "");
var DS_ID = getVariable("DS_ID", "");

// Look up the source data-source configuration in the system database
var dbSys = allocKettleDB("ai_sys_db");
try
{
  var recs = dbSys.querySQL("SELECT ds_config FROM ai_base.ai_data_source WHERE ds_id={DS_ID}", {DS_ID:"S" + DS_ID});
  if(recs.length == 0)
    throw "can't find DS ID : " + DS_ID;
  var connInfo = strToJson(recs[0]["DS_CONFIG"]);
}
finally
{
  dbSys.close();
}

// Query the raw history values (the 14 days leading up to TIME_DAY)
var db = allocJDBC(connInfo.driver, connInfo.url, connInfo.user, connInfo.pass);
try
{
  var recs = db.querySQL(
    "SELECT OTIME, SDVAL FROM sd_values.sd_raws WHERE OTYPE={OTYPE} AND ONAME={ONAME} AND OTIME >={TIME_DAY}::timestamp + '-14 days' AND OTIME <{TIME_DAY}::timestamp + '1 days' LIMIT 10",{
    OTYPE : "S" + otype,
    ONAME : "S" + oname,
    TIME_DAY : "T" + TIME_DAY
  });
}
finally
{
  db.close();
}

// Build the input structure expected by the Python script
var time = [];
var value = [];
for(var recIndex in recs)
{
  var rec = recs[recIndex];
  time.push(rec["OTIME"]);
  value.push(rec["SDVAL"]);
}

var pythonInArg = {
  prophet_starttime : TIME_DAY.substring(0, 10) + " 00:00:00",
  prophet_endtime : TIME_DAY.substring(0, 10) + " 23:59:59",
  periods : 1,
  data : {
    time : time,
    value : value
  }
};
saveFileText(pythonRoot + "_arg_in.json", jsonToStr(pythonInArg));
saveFileText(pythonRoot + "_code.py", this["PYTHON"]);

// Run the Python script with the input and output JSON file paths as arguments
var execCmd = pythonExec + " " + pythonRoot + "_code.py" + " " + pythonRoot + "_arg_in.json" + " " + pythonRoot + "_arg_out.json";
writeToLog("b", execCmd);
execProcess(execCmd);
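// The Python script is expected to write a JSON array of forecast records to
// *_arg_out.json; judging from the keys read below, each record presumably looks like
// {"ds": "...timestamp...", "yhat": ..., "yhat_lower": ..., "yhat_upper": ...}.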
if(fileExists(pythonRoot + "_arg_out.json"))
{
  var resultList = strToJson(loadFileContent(pythonRoot + "_arg_out.json"));

  // Emit one row per forecasted point
  for(var resultIndex in resultList)
  {
    var result = resultList[resultIndex];
    var output = {
      "otime" : result["ds"],
      "o_otype" : otype,
      "o_oname" : oname,
      "yhat" : result["yhat"],
      "yhat_lower" : result["yhat_lower"],
      "yhat_upper" : result["yhat_upper"]
    };
    outputRecord(output);
  }
}
else
{
  writeToLog("b", "no eval result written : " + otype + "." + oname);
}

// Clean up the temporary files
if(fileExists(pythonRoot + "_code.py"))
  deleteFile(pythonRoot + "_code.py");
if(fileExists(pythonRoot + "_arg_in.json"))
  deleteFile(pythonRoot + "_arg_in.json");
if(fileExists(pythonRoot + "_arg_out.json"))
  deleteFile(pythonRoot + "_arg_out.json");

// Skip the incoming row; output rows were already emitted via outputRecord()
trans_Status = SKIP_TRANSFORMATION;
</jsScript_script>
</jsScript>
<jsScript>
<jsScript_type>2</jsScript_type>
<jsScript_name>JS_END</jsScript_name>
<jsScript_script>//Script here
</jsScript_script>
</jsScript>
<jsScript>
<jsScript_type>-1</jsScript_type>
<jsScript_name>PYTHON</jsScript_name>
<jsScript_script>from lib.system import *
# import subprocess
from lib.Forcast.API import do_prophet

# Read the JSON argument file written by the JS_RUN script
argJson = readArgJson()
startTime = argJson['prophet_starttime']
endTime = argJson['prophet_endtime']
periods = argJson['periods']

# Use subprocess.run() to execute a shell command
# result = subprocess.run(["dir"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, text=True).stdout

# Run the Prophet forecast over the supplied time/value series
result = do_prophet(str(argJson['data']), periods)
# print(argJson['data'])

# Use strict=False to handle non-standard JSON strings
# rawRecs1 = json.loads(argJson['data'].replace("'", '"'), strict=False)

# writeResultText(str(argJson['data']))

# Write the forecast result back for the JS_RUN script to pick up
writeResultText(result)
</jsScript_script>
</jsScript>
</jsScripts>
<fields>
<field>
<name>otime</name>
<rename>otime</rename>
<type>String</type>
<length>-1</length>
<precision>-1</precision>
<replace>N</replace>
</field>
<field>
<name>o_otype</name>
<rename>o_otype</rename>
<type>String</type>
<length>-1</length>
<precision>-1</precision>
<replace>N</replace>
</field>
<field>
<name>o_oname</name>
<rename>o_oname</rename>
<type>String</type>
<length>-1</length>
<precision>-1</precision>
<replace>N</replace>
</field>
<field>
<name>yhat</name>
<rename>yhat</rename>
<type>String</type>
<length>-1</length>
<precision>-1</precision>
<replace>N</replace>
</field>
<field>
<name>yhat_lower</name>
<rename>yhat_lower</rename>
<type>String</type>
<length>-1</length>
<precision>-1</precision>
<replace>N</replace>
</field>
<field>
<name>yhat_upper</name>
<rename>yhat_upper</rename>
<type>String</type>
<length>-1</length>
<precision>-1</precision>
<replace>N</replace>
</field>
</fields>
<attributes/>
<cluster_schema/>
<remotesteps>
<input>
</input>
<output>
</output>
</remotesteps>
<GUI>
<xloc>352</xloc>
<yloc>304</yloc>
<draw>Y</draw>
</GUI>
</step>
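<!-- Step 表输入 (table input): reads the list of forecast targets (otype, oname)
     from ai_busi_map.nw_quota_forest_name over the ai_sys_db connection. -->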
<step>
<name>表输入</name>
<type>TableInput</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<connection>ai_sys_db</connection>
<sql>SELECT * FROM ai_busi_map.nw_quota_forest_name</sql>
<limit>0</limit>
<lookup/>
<execute_each_row>N</execute_each_row>
<variables_active>Y</variables_active>
<lazy_conversion_active>N</lazy_conversion_active>
<cached_row_meta_active>N</cached_row_meta_active>
<row-meta>
<value-meta>
<type>String</type>
<storagetype>normal</storagetype>
<name>otype</name>
<length>256</length>
<precision>-1</precision>
<origin>表输入</origin>
<comments>otype</comments>
<conversion_Mask/>
<decimal_symbol>.</decimal_symbol>
<grouping_symbol>,</grouping_symbol>
<currency_symbol/>
<trim_type>none</trim_type>
<case_insensitive>N</case_insensitive>
<collator_disabled>Y</collator_disabled>
<collator_strength>0</collator_strength>
<sort_descending>N</sort_descending>
<output_padding>N</output_padding>
<date_format_lenient>N</date_format_lenient>
<date_format_locale>zh_CN</date_format_locale>
<date_format_timezone>Asia/Shanghai</date_format_timezone>
<lenient_string_to_number>N</lenient_string_to_number>
</value-meta>
<value-meta>
<type>String</type>
<storagetype>normal</storagetype>
<name>oname</name>
<length>256</length>
<precision>-1</precision>
<origin>表输入</origin>
<comments>oname</comments>
<conversion_Mask/>
<decimal_symbol>.</decimal_symbol>
<grouping_symbol>,</grouping_symbol>
<currency_symbol/>
<trim_type>none</trim_type>
<case_insensitive>N</case_insensitive>
<collator_disabled>Y</collator_disabled>
<collator_strength>0</collator_strength>
<sort_descending>N</sort_descending>
<output_padding>N</output_padding>
<date_format_lenient>N</date_format_lenient>
<date_format_locale>zh_CN</date_format_locale>
<date_format_timezone>Asia/Shanghai</date_format_timezone>
<lenient_string_to_number>N</lenient_string_to_number>
</value-meta>
</row-meta>
<attributes/>
<cluster_schema/>
<remotesteps>
<input>
</input>
<output>
</output>
</remotesteps>
<GUI>
<xloc>160</xloc>
<yloc>304</yloc>
<draw>Y</draw>
</GUI>
</step>
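<!-- Step 输出结果 (write results): inserts each forecast row into
     ai_busi_map.nw_quota_forest, ignoring duplicates via ON CONFLICT DO NOTHING. -->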
<step>
<name>输出结果</name>
<type>HSWaterScriptValuesMod</type>
<description/>
<distribute>Y</distribute>
<custom_distribution/>
<copies>1</copies>
<partitioning>
<method>none</method>
<schema_name/>
</partitioning>
<compatible>N</compatible>
<optimizationLevel>9</optimizationLevel>
<jsScripts>
<jsScript>
<jsScript_type>1</jsScript_type>
<jsScript_name>JS_BEGIN</jsScript_name>
<jsScript_script>g_db = allocKettleDB("ai_sys_db");
</jsScript_script>
</jsScript>
<jsScript>
<jsScript_type>0</jsScript_type>
<jsScript_name>JS_STEP</jsScript_name>
<jsScript_script>// Insert the forecast row, skipping rows that already exist
var sql =
  " INSERT INTO ai_busi_map.nw_quota_forest("
  + " otime,otype,oname,"
  + " yhat,"
  + " yhat_lower,"
  + " yhat_upper"
  + " )VALUES("
  + " {otime},{otype},{oname},"
  + " {yhat},"
  + " {yhat_lower},"
  + " {yhat_upper}"
  + " )"
  + " ON CONFLICT (otime, otype, oname) DO NOTHING"
  ;

g_db.executeSQL(sql,
{
  otime : "T" + otime,
  otype : "S" + o_otype,
  oname : "S" + o_oname,
  yhat : "D" + yhat,
  yhat_lower : "D" + yhat_lower,
  yhat_upper : "D" + yhat_upper
});
</jsScript_script>
</jsScript>
<jsScript>
<jsScript_type>2</jsScript_type>
<jsScript_name>JS_END</jsScript_name>
<jsScript_script>g_db.close();
</jsScript_script>
</jsScript>
</jsScripts>
<fields> </fields>
<attributes/>
<cluster_schema/>
<remotesteps>
<input>
</input>
<output>
</output>
</remotesteps>
<GUI>
<xloc>528</xloc>
<yloc>304</yloc>
<draw>Y</draw>
</GUI>
</step>
<step_error_handling>
</step_error_handling>
<slave-step-copy-partition-distribution>
</slave-step-copy-partition-distribution>
<slave_transformation>N</slave_transformation>
<attributes/>
</transformation>