diff --git a/config/inputs.disabled/yaml/05.files.processed.day.yaml b/config/inputs.disabled/yaml/05.files.processed.day.yaml new file mode 100644 index 0000000..d2b42ac --- /dev/null +++ b/config/inputs.disabled/yaml/05.files.processed.day.yaml @@ -0,0 +1,11 @@ +input: + name : RAID Files Processed by day in last 7 days + cron : "*/10 * * * *" + waitForFinish: true + onlyOnEvent : true + execFrom : nInput_FilesProcessed + execArgs : + key : FMS ADM + attrName: FilesProcessedByDay + number: 7 + diff --git a/config/inputs.disabled/yaml/05.files.processed.hour.yaml b/config/inputs.disabled/yaml/05.files.processed.hour.yaml new file mode 100644 index 0000000..df60441 --- /dev/null +++ b/config/inputs.disabled/yaml/05.files.processed.hour.yaml @@ -0,0 +1,10 @@ +input: + name : RAID Files Processed by hour in last 48h + cron : "*/10 * * * *" + waitForFinish: true + onlyOnEvent : true + execFrom : nInput_FilesProcessed + execArgs : + key : FMS ADM + attrName: FilesProcessedByHour + number: 48 diff --git a/config/inputs.disabled/yaml/05.files.processed.twice.yaml b/config/inputs.disabled/yaml/05.files.processed.twice.yaml new file mode 100644 index 0000000..71b431d --- /dev/null +++ b/config/inputs.disabled/yaml/05.files.processed.twice.yaml @@ -0,0 +1,11 @@ +input: + name : RAID Files Processed more than once last 7 days + cron : "*/10 * * * *" + waitForFinish: true + onlyOnEvent : true + execFrom : nInput_FilesProcessed + execArgs : + key : FMS ADM + attrName: FilesProcessedMoreThanOnce + number: 7 + diff --git a/config/inputs.disabled/yaml/05.files.processederror.day.yaml b/config/inputs.disabled/yaml/05.files.processederror.day.yaml new file mode 100644 index 0000000..ce6407f --- /dev/null +++ b/config/inputs.disabled/yaml/05.files.processederror.day.yaml @@ -0,0 +1,11 @@ +input: + name : RAID Files in Error by day in last 7 days + cron : "*/10 * * * *" + waitForFinish: true + onlyOnEvent : true + execFrom : nInput_FilesProcessed + execArgs : + key : FMS ADM + attrName: FilesInErrorByDay + number: 7 + diff --git a/config/inputs.disabled/yaml/05.files.processederror.hour.yaml b/config/inputs.disabled/yaml/05.files.processederror.hour.yaml new file mode 100644 index 0000000..8c1cd41 --- /dev/null +++ b/config/inputs.disabled/yaml/05.files.processederror.hour.yaml @@ -0,0 +1,11 @@ +input: + name : RAID Files in Error by hour in last 48 hours + cron : "*/10 * * * *" + waitForFinish: true + onlyOnEvent : true + execFrom : nInput_FilesProcessed + execArgs : + key : FMS ADM + attrName: FilesInErrorByHour + number: 48 + diff --git a/config/inputs.disabled/yaml/05.files.processing.backlog.yaml b/config/inputs.disabled/yaml/05.files.processing.backlog.yaml new file mode 100644 index 0000000..5d1367e --- /dev/null +++ b/config/inputs.disabled/yaml/05.files.processing.backlog.yaml @@ -0,0 +1,11 @@ +input: + name : RAID Files Backlog by day in last 7 days + cron : "*/10 * * * *" + waitForFinish: true + onlyOnEvent : true + execFrom : nInput_FilesProcessed + execArgs : + key : FMS ADM + attrName: FilesBacklog + number: 7 + diff --git a/config/objects.assets/ninputfilesprocessed/nInput_FilesProcessed_config.yml b/config/objects.assets/ninputfilesprocessed/nInput_FilesProcessed_config.yml new file mode 100644 index 0000000..c08b31c --- /dev/null +++ b/config/objects.assets/ninputfilesprocessed/nInput_FilesProcessed_config.yml @@ -0,0 +1,544 @@ +FilesProcessedByHour: + name: Files processed by hour + oracle: > + select "Stream", "Loading Process", "Date", "Hour", + count("INPUT_ID") "Total Files Processed", 
+ max(aux."Number of Events") "Max Events per File", + min(aux."Number of Events") "Min Events per File", + avg(aux."Number of Events") "Avg Events per File", + sum(aux."Number of Events") "Total Events Processed", + sum(DECODE(NVL(aux."Number of Events", 0), 0, 1, 0)) "Total Files with 0 records" + from + ( + select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + TO_CHAR(dcc."MODIFIED_DATE", 'HH24') "Hour", + dci."INPUT_ID", + sum(cast(dcc."PROGRESS" as integer)) "Number of Events" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 2 + and dcc."MODIFIED_DATE" >= TRUNC(sysdate, 'HH') - {{number}}/24 + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and DBMS_LOB.instr(ATO."OBJ_DEFINITION", 'n="dataStreamType">file<') > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + TO_CHAR(dcc."MODIFIED_DATE", 'HH24'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date", + "Hour" + order by "Stream", "Loading Process", "Date" DESC, "Hour" desc + + postgresql: > + select "Stream", "Loading Process", "Date", "Hour", + count("INPUT_ID") "Total Files Processed", + max(aux."Number of Events") "Max Events per File", + min(aux."Number of Events") "Min Events per File", + avg(aux."Number of Events") "Avg Events per File", + sum(aux."Number of Events") "Total Events Processed", + sum(case (COALESCE(aux."Number of Events", 0)) when 0 then 1 else 0 end) "Total Files with 0 records" + from + ( + select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + TO_CHAR(dcc."MODIFIED_DATE", 'HH24') "Hour", + dci."INPUT_ID", + sum(cast(dcc."PROGRESS" as integer)) "Number of Events" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 2 + and dcc."MODIFIED_DATE" >= DATE_TRUNC('HOUR', CURRENT_DATE) - INTERVAL '{{number}} Hours' + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and position ( 'n="dataStreamType">file<' in ATO."OBJ_DEFINITION") > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + TO_CHAR(dcc."MODIFIED_DATE", 'HH24'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date", + "Hour" + order by "Stream", "Loading Process", "Date" desc, "Hour" desc + +FilesProcessedByDay: + name: Files processed by day + oracle: > + select "Stream", "Loading Process", "Date", + count("INPUT_ID") "Total Files Processed", 
+ max(aux."Number of Events") "Max Events per File", + min(aux."Number of Events") "Min Events per File", + avg(aux."Number of Events") "Avg Events per File", + sum(aux."Number of Events") "Total Events Processed", + sum(DECODE(NVL(aux."Number of Events", 0), 0, 1, 0)) "Total Files with 0 records" + from + ( + select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + dci."INPUT_ID", + sum(cast(dcc."PROGRESS" as integer)) "Number of Events" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 2 + and dcc."MODIFIED_DATE" >= TRUNC(sysdate) - {{number}} + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and DBMS_LOB.instr(ATO."OBJ_DEFINITION", 'n="dataStreamType">file<') > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date" + order by "Stream", "Loading Process", "Date" DESC + + postgresql: > + select "Stream", "Loading Process", "Date", + count("INPUT_ID") "Total Files Processed", + max(aux."Number of Events") "Max Events per File", + min(aux."Number of Events") "Min Events per File", + avg(aux."Number of Events") "Avg Events per File", + sum(aux."Number of Events") "Total Events Processed", + sum(case (COALESCE(aux."Number of Events", 0)) when 0 then 1 else 0 end) "Total Files with 0 records" + from + (select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + dci."INPUT_ID", + sum(cast(dcc."PROGRESS" as integer)) "Number of Events" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 2 + and dcc."MODIFIED_DATE" >= DATE_TRUNC('DAY', CURRENT_DATE) - INTERVAL '{{number}} Days' + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and position ( 'n="dataStreamType">file<' in ATO."OBJ_DEFINITION") > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + dci."INPUT_ID" + ) aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date" + order by "Stream", "Loading Process", "Date" desc + +FilesInErrorByHour: + name: Files in error by hour + oracle: > + select "Stream", "Loading Process", "Date", "Hour", + count(distinct "INPUT_ID") "Total Files in Error" + from + ( + select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + 
TO_CHAR(dcc."MODIFIED_DATE", 'HH24') "Hour", + dci."INPUT_ID" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 3 + and dcc."MODIFIED_DATE" >= TRUNC(sysdate, 'HH') - {{number}}/24 + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and DBMS_LOB.instr(ATO."OBJ_DEFINITION", 'n="dataStreamType">file<') > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + TO_CHAR(dcc."MODIFIED_DATE", 'HH24'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date", + "Hour" + order by "Stream", "Loading Process", "Date" desc, "Hour" desc + + postgresql: > + select "Stream", "Loading Process", "Date", "Hour", + count(distinct "INPUT_ID") "Total Files in Error" + from + ( + select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + TO_CHAR(dcc."MODIFIED_DATE", 'HH24') "Hour", + dci."INPUT_ID" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 3 + and dcc."MODIFIED_DATE" >= DATE_TRUNC('HOUR', CURRENT_DATE) - INTERVAL '{{number}} Hours' + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and position ( 'n="dataStreamType">file<' in ATO."OBJ_DEFINITION") > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + TO_CHAR(dcc."MODIFIED_DATE", 'HH24'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date", + "Hour" + order by "Stream", "Loading Process", "Date" desc, "Hour" desc + +FilesInErrorByDay: + name: Files in error by day + oracle: > + select "Stream", "Loading Process", "Date", + count(distinct "INPUT_ID") "Total Files Processed" + from + (select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + dci."INPUT_ID" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 3 + and dcc."MODIFIED_DATE" >= TRUNC(sysdate) - {{number}} + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and DBMS_LOB.instr(ATO."OBJ_DEFINITION", 'n="dataStreamType">file<') > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + 
TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date" + order by "Stream", "Loading Process", "Date" desc + + postgresql: > + select "Stream", "Loading Process", "Date", + count(distinct "INPUT_ID") "Total Files in Error" + from + (select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + dci."INPUT_ID", + sum(cast(dcc."PROGRESS" as integer)) "Number of Events" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 3 + and dcc."MODIFIED_DATE" >= DATE_TRUNC('DAY', CURRENT_DATE) - INTERVAL '{{number}} Days' + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and position ( 'n="dataStreamType">file<' in ATO."OBJ_DEFINITION") > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date" + order by "Stream", "Loading Process", "Date" desc + +FilesProcessedMoreThanOnce: + name: Files processed more than once + oracle: > + select atos."SHORT_NAME" "Stream", + atoslp."SHORT_NAME" "Loading Process", + dci."SOURCE", + TO_CHAR(MIN(dcc."MODIFIED_DATE"), 'YYYY-MM-DD HH24:MI:SS') "Min Date", + TO_CHAR(MAX(dcc."MODIFIED_DATE"), 'YYYY-MM-DD HH24:MI:SS') "Max Date", + COUNT(DISTINCT dcc."INPUT_ID") "Times Processed" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 2 + and dcc."MODIFIED_DATE" >= TRUNC(SYSDATE) - {{number}} + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and DBMS_LOB.instr(ATO."OBJ_DEFINITION", 'n="dataStreamType">file<') > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + dci."SOURCE" + having count(0) > 1 + ORDER BY "Stream", "Loading Process", "Times Processed" DESC + + postgresql: > + select atos."SHORT_NAME" "Stream", + atoslp."SHORT_NAME" "Loading Process", + dci."SOURCE", + TO_CHAR(MIN(dcc."MODIFIED_DATE"), 'YYYY-MM-DD HH24:MI:SS') "Min Date", + TO_CHAR(MAX(dcc."MODIFIED_DATE"), 'YYYY-MM-DD HH24:MI:SS') "Max Date", + COUNT(DISTINCT dcc."INPUT_ID") "Times Processed" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 2 + and dcc."MODIFIED_DATE" >= DATE_TRUNC('DAY', CURRENT_DATE) - INTERVAL '{{number}} Days' + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and 
ATOS."LAST_VERSION" = ATO."VERSION" + and position ( 'n="dataStreamType">file<' in ATO."OBJ_DEFINITION") > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + dci."SOURCE" + having count(0) > 1 + ORDER BY "Stream", "Loading Process", "Times Processed" DESC + +FilesBacklog: + name: Files backlog + oracle: > + select "Stream", "Loading Process", "Date", + count(distinct "INPUT_ID") "Total Files Backlog", + MIN("Min Date") "Min Date" + from + (select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + dci."INPUT_ID", + MIN(TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD HH24:MI')) "Min Date" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 0 + and dcc."MODIFIED_DATE" >= TRUNC(sysdate) - {{number}} + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and DBMS_LOB.instr(ATO."OBJ_DEFINITION", 'n="dataStreamType">file<') > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date" + order by "Stream", "Loading Process", "Date" desc + + postgresql: > + select "Stream", "Loading Process", "Date", + count(distinct "INPUT_ID") "Total Files Backlog", + MIN("Min Date") "Min Date" + from + (select + dci."STREAM_UUID", + atos."SHORT_NAME" "Stream", + atoslp."OBJ_SPEC_UUID" "Loading Process ID", + atoslp."SHORT_NAME" "Loading Process", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD') "Date", + dci."INPUT_ID", + MIN(TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD HH24:MI')) "Min Date" + from + "DP_C_INPUT" dci , + "AF_T_OBJ_SPEC" atos, + "DP_C_CONTROL" dcc, + "AF_T_OBJ_SPEC" atoslp + where + dci."STREAM_UUID" = atos."OBJ_SPEC_UUID" + and dci."INPUT_ID" = dcc."INPUT_ID" + and dcc."STATUS" = 0 + and dcc."MODIFIED_DATE" >= DATE_TRUNC('DAY', CURRENT_DATE) - INTERVAL '{{number}} Days' + and dcc."LOADING_PROCESS_SPEC_ID" = atoslp."OBJ_SPEC_ID" + and atoslp."OBJ_TYPE_NAME" = 'DATAPUMP.LoadingProcess' + and exists (select 0 + from "AF_T_OBJ" ato + where ATOS."OBJ_SPEC_UUID" = ATO."OBJ_SPEC_UUID" + and ATOS."LAST_VERSION" = ATO."VERSION" + and position ( 'n="dataStreamType">file<' in ATO."OBJ_DEFINITION") > 0) + group by + dci."STREAM_UUID", + atos."SHORT_NAME", + atoslp."OBJ_SPEC_UUID", + atoslp."SHORT_NAME", + TO_CHAR(dcc."MODIFIED_DATE", 'YYYY-MM-DD'), + dci."INPUT_ID") aux + group by "STREAM_UUID", + "Stream", + "Loading Process ID", + "Loading Process", + "Date" + order by "Stream", "Loading Process", "Date" desc \ No newline at end of file diff --git a/config/objects/nInput_FilesProcessed.js b/config/objects/nInput_FilesProcessed.js new file mode 100644 index 0000000..50dd690 --- /dev/null +++ b/config/objects/nInput_FilesProcessed.js @@ -0,0 +1,193 @@ +// Author: Nuno Aguiar, Andreia Brizida + +/** + * + * nattrmon.nInput_FilesProcessed(aMap) : nInput + * aMap is composed of:\ + * - attrName (results to be presented, available options are: FilesProcessedByHour, 
FilesProcessedByDay, FilesInErrorByHour, FilesInErrorByDay, FilesProcessedMoreThanOnce, FilesBacklog)
+ *    - chKeys (the channel of keys to use)\
+ *    - key (the key for the database access)\
+ *    - key.parent (in alternative provide the parent key (e.g. RAS))\
+ *    - key.child (in alternative provide the child key synonym (e.g. db.app))\
+ *    - dontUseKey (boolean to indicate if a key field should not be added in all records returned with chKeys)
+ *    - number (number of days/hours ago the files were processed/errored out)
+ *
+ */
+
+var nInput_FilesProcessed = function(aMap) {
+    if (!isNull(aMap) && isMap(aMap)) {
+        this.params = aMap;
+    } else {
+        this.params = {};
+    }
+    this.params.dontUseKey = _$(this.params.dontUseKey, "dontUseKey").isBoolean().default(isDef(this.params.key))
+
+
+    if (isUnDef(this.params.attrName))
+    {
+        logErr("nInput_FilesProcessed | MAIN: You need to provide the attrName (Attribute Name).")
+        return 0
+    }
+
+    this.var_yaml_file = nattrmon.getConfigPath("objects.assets/ninputfilesprocessed/") + "/objects.assets/ninputfilesprocessed/nInput_FilesProcessed_config.yml"
+
+    try {
+        this.attributeObject = io.readFileYAML(this.var_yaml_file);
+    } catch (e) {
+        logErr("nInput_FilesProcessed | MAIN: Error reading configuration file from " + this.var_yaml_file + ".")
+        throw e
+    }
+
+    if (isUnDef(this.attributeObject[this.params.attrName].name))
+    {
+        logErr("nInput_FilesProcessed | MAIN: Cannot find configuration for attrName " + this.params.attrName + ".")
+        return 0
+    }
+
+    if (isUnDef(this.params.number))
+    {
+        this.params.number = 1
+    }
+
+    if (isUnDef(this.params.attrTemplate))
+        if (isDef(this.params.key))
+            this.params.attrTemplate = "RAID/" + this.attributeObject[this.params.attrName].name
+        else
+            this.params.attrTemplate = "RAID/DB {{key}}/" + this.attributeObject[this.params.attrName].name
+
+    nInput.call(this, this.input)
+}
+inherit(nInput_FilesProcessed, nInput)
+
+nInput_FilesProcessed.prototype.get = function(aKey, parent, ret, scope) {
+    var parent2 = parent
+    try {
+        var queries = this.attributeObject;
+        queries = queries[parent.attrName];
+    } catch (e) {
+        logErr("nInput_FilesProcessed | GET: Error reading configuration file from " + this.var_yaml_file + ".")
+        throw e
+    }
+
+    try {
+        var res
+
+        // Determine the database name
+        nattrmon.useObject(aKey, function(aDb) {
+            res = String(aDb.getConnect().getMetaData().getDatabaseProductName()).toLowerCase()
+            return true
+        })
+        // Keep the sqls for the specific database type
+        var nquery = queries[res]
+        // Variables to be mapped on the queries
+        var data = { number: parent.number}
+
+        // If object pool association is provided
+        if (isObject(parent.key) && isDef(parent.key.parent) && isDef(parent.key.child)) {
+            // Retrieve association
+            parent.objectPoolKey = nattrmon.getAssociatedObjectPool(parent.key.parent, parent.key.child)
+        } else {
+            // Keep current key
+            parent.objectPoolKey = parent.key
+        }
+
+        try {
+            var naAttr = templify(parent.attrTemplate, merge(parent, { query: nquery }))
+            if (isDef(parent.objectPoolKey)) {
+                if (nattrmon.isObjectPool(parent.objectPoolKey)) {
+                    nattrmon.useObject(parent.objectPoolKey, function(aDb) {
+                        try {
+                            if (isDef(aDb.convertDates)) aDb.convertDates(true)
+                            res = aDb.q(templify(nquery, data)).results
+                        } catch (e) {
+                            logErr("nInput_FilesProcessed | Error while retrieving DB query from '" + parent.objectPoolKey + "' for '" + parent.objectPoolKey + ": " + e.message)
+                            logErr("nInput_FilesProcessed | Key = '" + parent.objectPoolKey + "' DB query = '" + nquery + "'")
+                            throw e
+                        }
+
+                        // Properly end transaction (issue #93)
+                        aDb.rollback()
+                        return true
+                    })
+                } else {
+                    logWarn("nInput_FilesProcessed | Object pool key = '" + parent.objectPoolKey + "' not found.")
+                }
+            }
+            // Handle result and adding or not the key field
+            if (isUnDef(res)) {
+                ret[naAttr] = __
+            } else {
+
+                // Verify if key field exists in res
+                if (!parent.dontUseKey) {
+
+                    var keyFieldExists = false
+                    res = res.map(record => {
+                        if (!keyFieldExists) keyFieldExists = Object.keys(record).indexOf("key") >= 0
+                        record.key = parent.objectPoolKey
+                        return record
+                    })
+                    if (keyFieldExists) logWarn("nInput_FilesProcessed | Result from '" + naAttr + "' for '" + parent.objectPoolKey + "' contains a 'key' field. It will be overwritten (to modify this behaviour use dontUseKey=true).")
+
+                    // Check if needs concat with previous results of other keys
+                    if (isDef(ret[naAttr])) {
+                        sync(() => {
+                            ret[naAttr] = ret[naAttr].concat(res)
+                        }, ret[naAttr])
+                    } else {
+                        ret[naAttr] = res
+                    }
+
+                } else {
+                    // Simplify result if one single column and/or one single value output
+                    if (res.length == 1 && Object.keys(res[0]).length == 1) {
+                        ret[naAttr] = res[0][Object.keys(res[0])[0]]
+                    } else {
+                        if (res.length == 1) {
+                            ret[naAttr] = res[0]
+                        } else {
+                            ret[naAttr] = res
+                        }
+                    }
+                }
+            }
+        } catch(ee) {
+            logErr("nInput_FilesProcessed | Problem trying to process query '" + nquery + "' for '" + parent.objectPoolKey + ": " + ee)
+        }
+        return 1
+    } catch (e) {
+        logErr("nInput_FilesProcessed | Error while retrieving DB queries from '" + parent2.objectPoolKey + "': " + stringify(e))
+        if (isUnDef(parent2.objectPoolKey)) {
+            nattrmon.declareMonitoredObjectDirty(parent2.monitoredObjectKey)
+            parent2.db = nattrmon.getMonitoredObject(parent2.monitoredObjectKey)
+        }
+    }
+}
+
+nInput_FilesProcessed.prototype.input = function(scope, args) {
+    var ret = {}
+    var parent = this
+
+    ow.template.addHelper("toDate", (s) => { if (isDef(s) && s != null && new Date(s) != null) return "to_date('" + ow.format.fromDate(new Date(s), 'yyyyMMddHHmmss') + "', 'YYYYMMDDHH24MISS')"; else return null; })
+
+    if (isDef(this.params.chKeys)) {
+        var arr = []
+        $ch(this.params.chKeys).forEach((k, v) => {
+            var m = {
+                key         : k.key,
+                attrName    : parent.params.attrName,
+                attrTemplate: parent.params.attrTemplate,
+                dontUseKey  : parent.params.dontUseKey
+            }
+            m = merge(v, m)
+            parent.get(k.key, m, ret, scope)
+        })
+    } else {
+        //parent.get(parent.key, parent.params, ret, scope)
+        parent.get(parent.params.key, parent.params, ret, scope)
+    }
+
+    ow.template.delHelper("toDate")
+
+    return ret
+}
\ No newline at end of file
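Usage note: the six input definitions added above each target a single RAID database through a fixed execArgs key. As documented in nInput_FilesProcessed, a channel of keys (chKeys) can be used instead, so the same query runs against every key in the channel and the results carry a key field per record. The following is a minimal sketch of such a definition; the channel name raid-dbs is illustrative (it must match an existing keys channel), while all other fields mirror the definitions above.

input:
  name         : RAID Files Processed by hour in last 48h
  cron         : "*/10 * * * *"
  waitForFinish: true
  onlyOnEvent  : true
  execFrom     : nInput_FilesProcessed
  execArgs     :
    chKeys  : raid-dbs
    attrName: FilesProcessedByHour
    number  : 48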