Dependency | Reason |
---|---|
Dagrun Running | Task instance's dagrun was not in the 'running' state but in the state 'success'. |
Task Instance State | Task is in the 'success' state which is not a valid state for execution. The task must be cleared in order to be run. |
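Both messages say the same thing from two angles: this task instance (and its DAG run) already finished with state `success` for execution date 2024-09-28T15:00:00+00:00, so the scheduler's dependency checks refuse to execute it again. To force a re-run, the task instance's state must be cleared first, e.g. with the Clear action in the web UI or, on what appears to be an Airflow 1.x deployment, something like `airflow clear cv202302_safety_walk -t getMongoDB -s 2024-09-28 -e 2024-09-29` on the CLI. The callable behind the `getMongoDB` task is reproduced below.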
```python
# Imports the snippet relies on (assumed to sit at the top of the DAG file);
# dRoW_api_end_url is likewise assumed to be defined elsewhere in that file.
import json

import numpy as np
import pandas as pd
import requests
from pandas import json_normalize
from sqlalchemy import create_engine


def getMongoDB(**context):
    # Despite its name, this task pulls a MongoDB-backed document export over
    # HTTP and loads it into the PostgreSQL data warehouse.
    token = context.get("ti").xcom_pull(key="token")
    response = requests.get(
        url=f"{dRoW_api_end_url}/api/module/document-export/airflow/workflow/64ba0dc1ef64f30c95e70223?export_type=0",
        headers={
            "x-access-token": f"Bearer {token}",
            "ICWPxAccessKey": "nd@201907ICWP_[1AG:4UdI){n=b~",
        })
    RISC_Data = json.loads(response.text)

    # Source field name -> warehouse column name.
    Mapping = {
        "Date of Inspection:": "a3_date_time",
        "Follow up Summary": "c_summary_of_follow_up_actions",
    }
    # Safety-walk category keys exactly as they appear in the source records
    # (kept verbatim, typos included, because they are dictionary lookup keys).
    saftey_cats = [
        "1. 進出途徑 Access and Egress:",
        "2. 一般事項 General",
        "3. 高空作業 Working at Heigh:",
        "4. 起重機械及起重裝置Lifting Appliances & Lifting Gear:",
        "5. 電力 Electricity:",
        "6. 泥土工程 Earthwork:",
        "7. 機器 Machinery:",
        "8. 防火措施 Fire Preventions:",
        "9. 健康 Health:",
        "10. 個人防護設備 Personal Protective Equipment:",
        "11. 密閉空間 Confined Space:",
        "12. 化學物品:",  # Chemicals
        "13. 福利設施:",  # Welfare facilities
    ]

    # Connection details for the PostgreSQL data warehouse.
    host = 'drowdatewarehouse.crlwwhgepgi7.ap-east-1.rds.amazonaws.com'
    # User name of the database server
    dbUserName = 'dRowAdmin'
    # Password for the database user
    dbUserPassword = 'drowsuper'
    # Name of the database
    database = 'drowDateWareHouse'
    # Character set (unused below; "utf8mb4" is a MySQL charset, the target is PostgreSQL)
    charSet = "utf8mb4"
    port = "5432"
    # SQLAlchemy 1.4+ only accepts the 'postgresql://' dialect name
    # (older versions also tolerated 'postgres://').
    conn_string = ('postgresql://' +
                   dbUserName + ':' +
                   dbUserPassword +
                   '@' + host + ':' + port +
                   '/' + database)
    db = create_engine(conn_string)

    df = pd.DataFrame()
    # Note: DataFrame.append was removed in pandas 2.0 and Series.bool() is
    # deprecated in recent pandas; this code targets the older pandas bundled
    # with this Airflow 1.x deployment.
    with db.connect() as conn:
        for x in RISC_Data:
            # Flatten one inspection record into a single-row DataFrame.
            df_nested_list = json_normalize(x['data'])
            df2 = df_nested_list.reindex(columns=Mapping.keys())

            # Pull the two sign-off dates out of the approval log, if present.
            if len(x['ApproveLogSummary']) > 0:
                request_data = [data for data in x['ApproveLogSummary']
                                if data.get('statusName') == "B : Checked by RSS"]
                if len(request_data) > 0 and 'from' in request_data[-1]:
                    df2['sup_rep_signed_date'] = request_data[-1]['from']
                else:
                    df2['sup_rep_signed_date'] = None
                if len(request_data) > 0 and 'to' in request_data[-1]:
                    df2['contractor_rep_signed_date'] = request_data[-1]['to']
                else:
                    df2['contractor_rep_signed_date'] = None
            else:
                df2['sup_rep_signed_date'] = None
                df2['contractor_rep_signed_date'] = None

            # Count follow-up items whose completion date falls after the
            # agreed due date.
            if len(x['data']['Follow up Summary']) > 0:
                total_late_retification = 0
                for summaryData in x['data']['Follow up Summary']:
                    if ("Agreed Due Date for Completion" in summaryData
                            and "Date Completed" in summaryData
                            and summaryData["Agreed Due Date for Completion"] != ''
                            and summaryData["Date Completed"] != ''
                            and pd.to_datetime(summaryData["Agreed Due Date for Completion"])
                                < pd.to_datetime(summaryData["Date Completed"])):
                        total_late_retification += 1
                df2['total_late_retification'] = total_late_retification
            else:
                df2['total_late_retification'] = 0

            # Days from inspection to contractor sign-off, rounded to 2 d.p.
            # (df2 has a single row here, so Series.bool() is safe).
            if (not df2['contractor_rep_signed_date'].isnull().bool()
                    and not df2['Date of Inspection:'].isnull().bool()):
                df2['days_complete'] = (((df2['contractor_rep_signed_date'].astype('datetime64[ns]') -
                                          df2['Date of Inspection:'].astype('datetime64[ns]'))
                                         / np.timedelta64(1, 'h')) / 24).round(2)
                if df2['days_complete'].isnull().bool() or df2['days_complete'].lt(0).bool():
                    df2['days_complete'] = 0
            else:
                df2['days_complete'] = None

            # Fan each record out into one row per safety category, counting
            # filled-in check results as complete when the supervisor's
            # representative has signed, and incomplete otherwise.
            df4 = pd.DataFrame()
            for saftey_cat in saftey_cats:
                df3 = df2.copy()
                complete = 0
                incomplete = 0
                if not df2['sup_rep_signed_date'].isnull().bool():
                    if len(x['data'][saftey_cat]) > 0:
                        for record in x['data'][saftey_cat]:
                            if record[saftey_cat.split(" ")[0] + ' Result'] != '':
                                complete += 1
                else:
                    if len(x['data'][saftey_cat]) > 0:
                        for record in x['data'][saftey_cat]:
                            if record[saftey_cat.split(" ")[0] + ' Result'] != '':
                                incomplete += 1
                df3['saftey_cat'] = saftey_cat
                df3['saftey_cat_complete'] = complete
                df3['saftey_cat_incomplete'] = incomplete
                df4 = df4.append(df3)
            df2 = df2.append(df4)
            df = df.append(df2)

        df.rename(columns=Mapping, inplace=True)
        df['sup_rep_signed_date'] = pd.to_datetime(df['sup_rep_signed_date'])
        df['contractor_rep_signed_date'] = pd.to_datetime(df['contractor_rep_signed_date'])
        df['a3_date_time'] = pd.to_datetime(df['a3_date_time'])
        # Drop the per-record parent rows, which carry no 'saftey_cat' value.
        df = df[df['saftey_cat'].notnull()]
        # Sanitise column names for PostgreSQL; regex=False so '.', '(' and
        # ')' are treated literally rather than as regex metacharacters.
        df.columns = (df.columns
                      .str.replace(' ', '_', regex=False)
                      .str.replace('.', '', regex=False)
                      .str.replace('(', '_', regex=False)
                      .str.replace(')', '', regex=False)
                      .str.replace('%', 'percent', regex=False)
                      .str.replace('/', '_', regex=False))
        df.drop(['c_summary_of_follow_up_actions'], axis=1, inplace=True)
        df.to_sql('safety_walk_cv202302', con=conn, if_exists='replace', index=False)
```
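`getMongoDB` assumes an upstream task has already pushed the dRoW API token to XCom under the key `"token"`; per the operator dump further down, that upstream task is `getDrowToken`. Its body is not shown in this dump, so the sketch below is hypothetical: the login path, payload, and response shape are placeholder assumptions, and the only requirement the code above actually imposes is the final `xcom_push(key="token", ...)`.

```python
import requests

def getDrowToken(**context):
    # Hypothetical login call; the real dRoW auth endpoint and payload are
    # not shown in the dump, only that a token ends up in XCom.
    response = requests.post(
        url=f"{dRoW_api_end_url}/api/auth/login",           # placeholder path
        json={"username": "<user>", "password": "<pass>"},  # placeholder creds
    )
    token = response.json()["token"]  # assumed response shape
    # The one hard requirement: push the token under the key getMongoDB pulls.
    context["ti"].xcom_push(key="token", value=token)
```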
Attribute | Value |
---|---|
dag_id | cv202302_safety_walk |
duration | 52.952377 |
end_date | 2024-09-29 15:03:00.538710+00:00 |
execution_date | 2024-09-28T15:00:00+00:00 |
executor_config | {} |
generate_command | <function TaskInstance.generate_command at 0x7f152f9bf320> |
hostname | 63fbafbc3109 |
is_premature | False |
job_id | 9822 |
key | ('cv202302_safety_walk', 'getMongoDB', <Pendulum [2024-09-28T15:00:00+00:00]>, 2) |
log | <Logger airflow.task (INFO)> |
log_filepath | /usr/local/airflow/logs/cv202302_safety_walk/getMongoDB/2024-09-28T15:00:00+00:00.log |
log_url | http://localhost:8080/admin/airflow/log?execution_date=2024-09-28T15%3A00%3A00%2B00%3A00&task_id=getMongoDB&dag_id=cv202302_safety_walk |
logger | <Logger airflow.task (INFO)> |
mark_success_url | http://localhost:8080/success?task_id=getMongoDB&dag_id=cv202302_safety_walk&execution_date=2024-09-28T15%3A00%3A00%2B00%3A00&upstream=false&downstream=false |
max_tries | 1 |
metadata | MetaData(bind=None) |
next_try_number | 2 |
operator | PythonOperator |
pid | 2606103 |
pool | default_pool |
prev_attempted_tries | 1 |
previous_execution_date_success | 2024-09-27 15:00:00+00:00 |
previous_start_date_success | 2024-09-28 15:01:15.992143+00:00 |
previous_ti | <TaskInstance: cv202302_safety_walk.getMongoDB 2024-09-27 15:00:00+00:00 [success]> |
previous_ti_success | <TaskInstance: cv202302_safety_walk.getMongoDB 2024-09-27 15:00:00+00:00 [success]> |
priority_weight | 1 |
queue | default |
queued_dttm | 2024-09-29 15:01:57.737963+00:00 |
raw | False |
run_as_user | None |
start_date | 2024-09-29 15:02:07.586333+00:00 |
state | success |
task | <Task(PythonOperator): getMongoDB> |
task_id | getMongoDB |
test_mode | False |
try_number | 2 |
unixname | airflow |
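The table above is the `TaskInstance` as the scheduler saw it; note `state | success` and `try_number | 2`, which is exactly why the dependency checks at the top refused to run it again. The table below is the corresponding operator definition. Because the operator sets `provide_context=True`, these same fields are what the callable receives through `**context`. A minimal sketch, using the standard Airflow 1.x context keys:

```python
def inspect_context(**context):
    # 'ti' is the TaskInstance dumped above; 'execution_date' is the
    # logical run date (2024-09-28T15:00:00+00:00 for this run).
    ti = context["ti"]
    print(ti.dag_id, ti.task_id, ti.state)
    print(ti.try_number)                   # 2 on this run
    print(context["execution_date"])
    token = ti.xcom_pull(key="token")      # how getMongoDB obtains its token
```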
Attribute | Value |
---|---|
dag | <DAG: cv202302_safety_walk> |
dag_id | cv202302_safety_walk |
depends_on_past | False |
deps | {<TIDep(Not In Retry Period)>, <TIDep(Trigger Rule)>, <TIDep(Previous Dagrun State)>} |
do_xcom_push | True |
downstream_list | [] |
downstream_task_ids | set() |
email | None |
email_on_failure | True |
email_on_retry | True |
end_date | None |
execution_timeout | None |
executor_config | {} |
extra_links | [] |
global_operator_extra_link_dict | {} |
inlets | [] |
lineage_data | None |
log | <Logger airflow.task.operators (INFO)> |
logger | <Logger airflow.task.operators (INFO)> |
max_retry_delay | None |
on_failure_callback | None |
on_retry_callback | None |
on_success_callback | None |
op_args | [] |
op_kwargs | {'name': 'Dylan'} |
operator_extra_link_dict | {} |
operator_extra_links | () |
outlets | [] |
owner | airflow |
params | {} |
pool | default_pool |
priority_weight | 1 |
priority_weight_total | 1 |
provide_context | True |
queue | default |
resources | None |
retries | 1 |
retry_delay | 0:05:00 |
retry_exponential_backoff | False |
run_as_user | None |
schedule_interval | 0 15 * * * |
shallow_copy_attrs | ('python_callable', 'op_kwargs') |
sla | None |
start_date | 2023-01-17T00:00:00+00:00 |
subdag | None |
task_concurrency | None |
task_id | getMongoDB |
task_type | PythonOperator |
template_ext | [] |
template_fields | ('templates_dict', 'op_args', 'op_kwargs') |
templates_dict | None |
trigger_rule | all_success |
ui_color | #ffefeb |
ui_fgcolor | #000 |
upstream_list | [<Task(PythonOperator): getDrowToken>] |
upstream_task_ids | {'getDrowToken'} |
wait_for_downstream | False |
weight_rule | downstream |
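Read together, the two attribute dumps pin down how the DAG was almost certainly wired up. The sketch below reconstructs it from the values above (schedule, start date, retries, retry delay, email flags, `op_kwargs`, and the `getDrowToken` upstream); the `default_args` split and the surrounding file layout are assumptions, not the project's actual DAG file.

```python
from datetime import datetime, timedelta

from airflow import DAG
from airflow.operators.python_operator import PythonOperator

dag = DAG(
    dag_id="cv202302_safety_walk",
    schedule_interval="0 15 * * *",           # from the dump
    start_date=datetime(2023, 1, 17),         # 2023-01-17T00:00:00+00:00
    default_args={
        "owner": "airflow",
        "retries": 1,
        "retry_delay": timedelta(minutes=5),  # retry_delay 0:05:00
        "email_on_failure": True,
        "email_on_retry": True,
    },
)

get_token = PythonOperator(
    task_id="getDrowToken",
    python_callable=getDrowToken,
    provide_context=True,
    dag=dag,
)

load_safety_walk = PythonOperator(
    task_id="getMongoDB",
    python_callable=getMongoDB,
    provide_context=True,
    op_kwargs={"name": "Dylan"},              # as shown in the dump
    dag=dag,
)

get_token >> load_safety_walk
```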