DAG: nec_section_of_work

schedule: 0 0,4,8,11,16 * * *


nec_section_of_work

Toggle wrap
  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
try:
    # Scheduler-side imports: Airflow core/operators plus the ETL stack.
    # Grouped stdlib / third-party for readability.
    from datetime import datetime, timedelta
    import json

    from airflow import DAG
    from airflow.operators.python_operator import PythonOperator
    from airflow.operators.http_operator import SimpleHttpOperator
    from airflow.operators.postgres_operator import PostgresOperator

    import numpy as np
    import pandas as pd
    import psycopg2
    import requests
    from pandas.io.json import json_normalize
    from sqlalchemy import create_engine

except Exception as e:
    # Surface the root cause in the scheduler log, then fail fast:
    # swallowing the error would only turn it into a NameError later.
    print("Error {} ".format(e))
    raise

dRoW_api_end_url = "https://drow.cloud"

def getDrowToken(**context):
    """Authenticate against the dRoW API and push the auth token to XCom.

    The token is stored under key ``"token"`` and is consumed by the
    downstream ``getDataAndSendToPSQL`` task.

    Raises:
        requests.HTTPError: if the authentication request fails, so the
            Airflow task fails loudly instead of pushing a missing token.
    """
    # SECURITY: credentials are hardcoded; they should live in an Airflow
    # Connection / Variable or a secrets backend, not in the DAG file.
    response = requests.post(
        url=f"{dRoW_api_end_url}/api/auth/authenticate",
        data={
            "username": "icwp2@drow.cloud",
            "password": "dGVzdDAxQHRlc3QuY29t",
        },
    )
    # Fail fast on a bad login (the original indexed into the JSON blindly
    # and would die with an opaque KeyError / JSONDecodeError instead).
    response.raise_for_status()
    context["ti"].xcom_push(key="token", value=response.json()["token"])


def _extract_sheet_records(sheet):
    """Flatten a dRoW sheet payload into a list of ``{colName: value}`` dicts.

    Only values whose ``colName`` appears in the sheet header are kept.
    Scalar cells use ``value`` (with the literal string ``'NA'`` mapped to
    ``None``); multi-value cells use ``valueArray``; ``Table`` cells are
    flattened into a list of sub-row dicts.
    """
    header_names = {h['colName'] for h in sheet['header']}
    extracted = []
    for record in sheet['record']:
        row = {}
        for v in record['values']:
            if v['colName'] not in header_names:
                continue
            if v.get('multValue') is not None and v['multValue'] == True:
                if v['colType'] == 'Table':
                    table_rows = []
                    for t in v['tableValue']:
                        # FIX: was ``s.value`` (AttributeError on a dict)
                        # and ``tObjectArray.push`` (lists have no push).
                        table_rows.append(
                            {s['colName']: s['value'] for s in t['subValues']}
                        )
                    # FIX: the original built this list and then discarded it.
                    row[v['colName']] = table_rows
                else:
                    row[v['colName']] = v['valueArray']
            else:
                value = v.get('value')
                # 'NA' is the sheet's explicit "no data" marker.
                row[v['colName']] = None if value is None or value == 'NA' else value
        extracted.append(row)
    return extracted


def getMongoDB(**context):
    """Pull two dRoW sheets and load them into the Postgres warehouse.

    Writes (``if_exists='replace'``) two tables:
      * ``nec_section_of_work``           - sheet 63fc7888d73f2b0c83bbfa57
      * ``nec_section_of_work_key_date``  - sheet 63fc928ed73f2b0c83bc263d

    Requires the auth token pushed to XCom by the ``getDrowToken`` task.
    """
    token = context.get("ti").xcom_pull(key="token")

    def _fetch_sheet(sheet_id):
        # One GET per sheet; fail the task on HTTP errors instead of
        # dying later on an unparseable body.
        response = requests.get(
            url=f"{dRoW_api_end_url}/api/sheets/{sheet_id}?with_records=true&fields=",
            headers={"x-access-token": f"Bearer {token}"},
        )
        response.raise_for_status()
        return _extract_sheet_records(json.loads(response.text))

    def _to_frame(records):
        # Normalize each record and concatenate once: DataFrame.append in a
        # loop was O(n^2) and was removed in pandas 2.0.  pd.json_normalize
        # replaces the removed pandas.io.json.json_normalize.
        frames = [pd.json_normalize(x) for x in records]
        return pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()

    def _clean_columns(frame):
        # Make column names SQL-friendly.  regex=False keeps the literal
        # single-character replacement stable across pandas versions.
        frame.columns = (
            frame.columns
            .str.replace(' ', '_', regex=False)
            .str.replace('.', '', regex=False)
            .str.replace('(', '_', regex=False)
            .str.replace(')', '', regex=False)
            .str.replace('%', 'percent', regex=False)
            .str.replace('/', '_', regex=False)
        )

    section_data = _fetch_sheet('63fc7888d73f2b0c83bbfa57')
    key_date_data = _fetch_sheet('63fc928ed73f2b0c83bc263d')

    # SECURITY: warehouse credentials are hardcoded; they should come from
    # an Airflow Connection / secrets backend.
    host = 'drowdatewarehouse.crlwwhgepgi7.ap-east-1.rds.amazonaws.com'
    dbUserName = 'dRowAdmin'
    dbUserPassword = 'drowsuper'
    database = 'drowDateWareHouse'
    port = "5432"

    conn_string = ('postgres://' +
                   dbUserName + ':' +
                   dbUserPassword +
                   '@' + host + ':' + port +
                   '/' + database)

    db = create_engine(conn_string)
    with db.connect() as conn:
        df = _to_frame(section_data)
        df['starting date'] = df['starting date'].apply(pd.to_datetime)
        df['ori comp date'] = df['ori comp date'].apply(pd.to_datetime)
        _clean_columns(df)
        df.to_sql('nec_section_of_work', con=conn, if_exists='replace', index=False)

        key_df = _to_frame(key_date_data)
        # Dates arrive as JS Date strings (e.g. "Fri Aug 14 2020 00:00:00
        # GMT+0800 ..."); only the first 24 characters are parseable.
        def _parse_js_date(raw):
            return datetime.strptime(raw[0:24], '%a %b %d %Y %H:%M:%S')

        key_df['Starting Date'] = key_df['Starting Date'].apply(_parse_js_date)
        key_df['Original completion dates'] = key_df['Original completion dates'].apply(_parse_js_date)
        _clean_columns(key_df)
        key_df.to_sql('nec_section_of_work_key_date', con=conn, if_exists='replace', index=False)

# Runs at minute 0 of hours 00, 04, 08, 11 and 16 every day.
with DAG(
        dag_id="nec_section_of_work",
        schedule_interval="0 0,4,8,11,16 * * *",
        default_args={
            "owner": "airflow",
            "retries": 1,
            "retry_delay": timedelta(minutes=5),
            "start_date": datetime(2022, 10, 24)
        },
        catchup=False) as dag:

    # Task variables deliberately do NOT shadow the callables they wrap
    # (the original rebound ``getMongoDB``/``getDrowToken`` to operators).
    load_warehouse_task = PythonOperator(
        task_id="getDataAndSendToPSQL",
        python_callable=getMongoDB,
        provide_context=True,
    )

    fetch_token_task = PythonOperator(
        task_id="getDrowToken",
        python_callable=getDrowToken,
        provide_context=True,
    )

# Authenticate first, then extract + load.
fetch_token_task >> load_warehouse_task