Commit bd01ebb9 authored by Timothy Ardha

Merge branch 'jtrust' of gitlab.jiarsi.com:margrenzo.g/airflow into jtrust

parents cb37199d 24b1084c
@@ -39,7 +39,7 @@ DS_CREATE_TABLE = ''
def create_ddl(schema, column):
return f"""select string_agg(x,'') from (select
- 'create table {POSTGRES_SCHEMA}.'||table_catalog||'_'||table_name|| ' ( ' || string_agg('"'||column_name||'"', ' text,' order by ordinal_position) || ' text'|| ' ); ' as x
+ 'create table {POSTGRES_SCHEMA}.'||table_catalog||'_'||table_name|| ' ( ' || string_agg('"'||replace(column_name, 'date_of_data','dateofdata')||'"', ' text,' order by ordinal_position) || ' text'|| ' ); ' as x
from information_schema.columns where table_schema = '{schema}'
and table_name in ({column})
@@ -81,7 +81,7 @@ def ds_get_syntax(ti):
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
- "copy_sql": f"""COPY {DS_SCHEMA}.{table['file_id'].replace(".", "_")} to STDOUT delimiter '|' CSV header""",
+ "copy_sql": f"""COPY {table['file_id']} to STDOUT delimiter '|' CSV header""",
"file_id": f"""{table['table_source']}_{table['file_id']}""",
"database": f"""{table['table_source']}"""}
}
@@ -116,8 +116,8 @@ def ds_get_ddl(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
sql_table = []
for table in json.loads(iris[0][0][0]):
- print(create_ddl(DS_SCHEMA, f"""'{table['file_id']}'"""))
- sql_stmt = create_ddl(DS_SCHEMA, f"""'{table['file_id']}'""")
+ print(create_ddl(table['file_id'].split('.')[0], f"""'{table['file_id'].split('.')[1]}'"""))
+ sql_stmt = create_ddl(table['file_id'].split('.')[0], f"""'{table['file_id'].split('.')[1]}'""")
pg_hook = PostgresHook(
postgres_conn_id=DS_CONN_ID,
schema=table['table_source']
@@ -139,7 +139,7 @@ def pg_push_syntax(ti):
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
- "copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_source']}_{table['file_id'].replace(".", "_")} from STDOUT delimiter '|' CSV header""",
+ "copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_source']}_{table['file_id'].split('.')[1].replace(".", "_")} from STDOUT delimiter '|' CSV header""",
"file_id": f"""{table['table_source']}_{table['file_id']}""",}}
for
table in json.loads(iris[0][0][0])]
@@ -270,6 +270,12 @@ with DAG("APJ_1_call_center",
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
+ ds_to_history = PostgresOperator(
+ sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
+ task_id="ds_to_history",
+ postgres_conn_id=POSTGRES_CONN_ID,
+ )
delete_before = BashOperator(
task_id="delete_before",
@@ -289,4 +295,4 @@ with DAG("APJ_1_call_center",
)
- begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> delete_before >> history_finish
+ begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> delete_before >> history_finish
@@ -81,7 +81,7 @@ def ds_get_syntax(ti):
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
- "copy_sql": f"""COPY {DS_SCHEMA}.{table['file_id'].replace(".", "_")} to STDOUT delimiter '|' CSV header""",
+ "copy_sql": f"""COPY {table['table_source']}.{table['file_id'].replace(".", "_")} to STDOUT delimiter '|' CSV header""",
"file_id": f"""{table['table_source']}_{table['file_id']}""",
"database": f"""{table['table_source']}"""}
}
@@ -116,8 +116,8 @@ def ds_get_ddl(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
sql_table = []
for table in json.loads(iris[0][0][0]):
- print(create_ddl(DS_SCHEMA, f"""'{table['file_id']}'"""))
- sql_stmt = create_ddl(DS_SCHEMA, f"""'{table['file_id']}'""")
+ print(create_ddl(table['table_source'], f"""'{table['file_id']}'"""))
+ sql_stmt = create_ddl(table['table_source'], f"""'{table['file_id']}'""")
pg_hook = PostgresHook(
postgres_conn_id=DS_CONN_ID,
schema=table['table_source']
@@ -152,7 +152,7 @@ def pg_push_csv(ti, copy_sql, file_id):
"WITH_DATE") == 'Y' else '') + f"""{file_id}.csv""")
- with DAG("APJ_1_ebanking_jtrust",
+ with DAG("APJ_1_ebanking",
start_date=datetime(2021, 1, 1),
schedule=cron_tab,
catchup=False,
@@ -270,6 +270,12 @@ with DAG("APJ_1_ebanking_jtrust",
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
+ ds_to_history = PostgresOperator(
+ sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
+ task_id="ds_to_history",
+ postgres_conn_id=POSTGRES_CONN_ID,
+ )
delete_before = BashOperator(
task_id="delete_before",
@@ -289,4 +295,4 @@ with DAG("APJ_1_ebanking_jtrust",
)
- begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> delete_before >> history_finish
+ begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> delete_before >> history_finish
@@ -256,6 +256,12 @@ with DAG("APJ_1_EDC",
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
+ ds_to_history = PostgresOperator(
+ sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
+ task_id="ds_to_history",
+ postgres_conn_id=POSTGRES_CONN_ID,
+ )
history_finish = PostgresOperator(
sql=f"""
@@ -270,4 +276,4 @@ with DAG("APJ_1_EDC",
- begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> history_finish
+ begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> history_finish
\ No newline at end of file
@@ -81,7 +81,7 @@ def ds_get_syntax(ti):
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
- "copy_sql": f"""COPY {DS_SCHEMA}.{table['file_id'].replace(".", "_")} to STDOUT delimiter '|' CSV header""",
+ "copy_sql": f"""COPY {table['table_source']}.{table['file_id'].replace(".", "_")} to STDOUT delimiter '|' CSV header""",
"file_id": f"""{table['table_source']}_{table['file_id']}""",
"database": f"""{table['table_source']}"""}
}
@@ -116,8 +116,8 @@ def ds_get_ddl(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
sql_table = []
for table in json.loads(iris[0][0][0]):
- print(create_ddl(DS_SCHEMA, f"""'{table['file_id']}'"""))
- sql_stmt = create_ddl(DS_SCHEMA, f"""'{table['file_id']}'""")
+ print(create_ddl(table['table_source'], f"""'{table['file_id']}'"""))
+ sql_stmt = create_ddl(table['table_source'], f"""'{table['file_id']}'""")
pg_hook = PostgresHook(
postgres_conn_id=DS_CONN_ID,
schema=table['table_source']
@@ -152,7 +152,7 @@ def pg_push_csv(ti, copy_sql, file_id):
"WITH_DATE") == 'Y' else '') + f"""{file_id}.csv""")
- with DAG("APJ_1_ibb_jtrust",
+ with DAG("APJ_1_ibb",
start_date=datetime(2021, 1, 1),
schedule=cron_tab,
catchup=False,
@@ -271,6 +271,12 @@ with DAG("APJ_1_ibb_jtrust",
""",
)
+ ds_to_history = PostgresOperator(
+ sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
+ task_id="ds_to_history",
+ postgres_conn_id=POSTGRES_CONN_ID,
+ )
delete_before = BashOperator(
task_id="delete_before",
bash_command=f"""
@@ -289,4 +295,4 @@ with DAG("APJ_1_ibb_jtrust",
)
- begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> delete_before >> history_finish
+ begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> delete_before >> history_finish
@@ -19,12 +19,13 @@ from airflow.decorators import task
from airflow.hooks.postgres_hook import PostgresHook
from airflow import XComArg
yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d') if Variable.get(
"DATE_OF_DATA") == 'today' else Variable.get("DATE_OF_DATA")
yesterday_strip = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%Y-%m-%d')
+ yesterday_lusa = (datetime.strptime(yesterday_nodash, '%Y%m%d') - timedelta(1)).strftime('%Y%m%d')
d = f"""{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
+ cron_tab = None if Variable.get("CRON_JOB") == 'manual' else Variable.get("CRON_JOB")
today = d[:]
POSTGRES_CONN_ID = Variable.get("DS_DB")
@@ -35,17 +36,20 @@ DS_DB = 'los'
DS_SCHEMA = 'public'
DS_CREATE_TABLE = ''
- def create_ddl(schema,column):
+ def create_ddl(schema, column):
return f"""select string_agg(x,'') from (select
- 'create table {POSTGRES_SCHEMA}.'||table_name || ' ( ' || string_agg(column_name, ' text,' order by ordinal_position) || ' text'|| ' ); ' as x
+ 'create table {POSTGRES_SCHEMA}."'||table_catalog||'_'||table_name|| '" ( ' || string_agg('"'||column_name||'"', ' text,' order by ordinal_position) || ' text'|| ' ); ' as x
from information_schema.columns where table_schema = '{schema}'
and table_name in ({column})
- group by table_schema, table_name) as x ;"""
+ group by table_catalog, table_schema, table_name) as x ;"""
def _start():
print("Start :: Extractor ")
def check_done_ext(**kwargs):
from random import randint
@@ -59,6 +63,7 @@ def check_done_ext(**kwargs):
# STOP DAG
return True
def ds_list_extractor():
sql_stmt = f"""select * from ds_conf.ds_extractor_list_extractor('los', '{yesterday_strip}');"""
pg_hook = PostgresHook(
@@ -70,29 +75,38 @@ def ds_list_extractor():
files = cursor.fetchall()
return files
def ds_get_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
- "copy_sql": f"""COPY ( select * from {DS_SCHEMA}.{table['file_id'].replace(".", "_")}{table['partition_table_format']} {table['condition']}) to STDOUT delimiter '{table['delimiter']}' CSV header""",
- "file_id": table['file_id']}}
+ "copy_sql": f"""COPY "{table['table_source']}".{table['file_id'].replace(".", "_")} to STDOUT delimiter '|' CSV header""",
+ "file_id": f"""{table['table_source']}_{table['file_id']}""",
+ "database": f"""{table['table_source']}"""}
+ }
for
table in json.loads(iris[0][0][0])]
- def ds_get_csv(ti, copy_sql, file_id):
- pg_hook = PostgresHook.get_hook(DS_CONN_ID)
+ def ds_get_csv(ti, copy_sql, file_id, database):
+ pg_hook = PostgresHook(
+ postgres_conn_id=DS_CONN_ID,
+ schema=database
+ )
+ print('INFO::',pg_hook)
pg_hook.copy_expert(copy_sql, filename=f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/""" + (
f"""{yesterday_nodash}/{yesterday_nodash}.""" if Variable.get(
"WITH_DATE") == 'Y' else '') + f"""{file_id}.csv""")
#
# def ds_drop_syntax(ti):
# iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
# if not iris:
# raise Exception('No data.')
# return [{
- # "sql": f"""drop table ds_ebanking.{table['file_id'].replace(".", "_")} CASCADE;"""}
+ # "sql": f"""drop table ds_los.{table['file_id'].replace(".", "_")} CASCADE;"""}
# for
# table in json.loads(iris[0][0][0])]
@@ -100,48 +114,49 @@ def ds_get_csv(ti, copy_sql, file_id):
def ds_get_ddl(ti):
global DS_CREATE_TABLE
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
- column = []
+ sql_table = []
for table in json.loads(iris[0][0][0]):
- column.append(f"""'{table['file_id']}'""")
- print(create_ddl(DS_SCHEMA, ','.join(column)))
- sql_stmt = create_ddl(DS_SCHEMA, ','.join(column))
+ print(create_ddl(table['table_source'], f"""'{table['file_id']}'"""))
+ sql_stmt = create_ddl(table['table_source'], f"""'{table['file_id']}'""")
pg_hook = PostgresHook(
postgres_conn_id=DS_CONN_ID,
+ schema=table['table_source']
)
pg_conn = pg_hook.get_conn()
cursor = pg_conn.cursor()
cursor.execute(sql_stmt)
- return cursor.fetchall()[0][0]
+ sql_table.append(cursor.fetchall()[0][0])
+ return sql_table
def pg_ddl_syntax(ti):
print(ti.xcom_pull(task_ids=['ds_get_ddl'])[0])
return [{"sql": ti.xcom_pull(task_ids=['ds_get_ddl'])[0]}]
def pg_push_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
- "copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['file_id'].replace(".", "_")} from STDOUT delimiter '|' CSV header""",
- "file_id": table['file_id']}}
+ "copy_sql": f"""COPY {POSTGRES_SCHEMA}."{table['table_source']}_{table['file_id'].replace(".", "_")}" from STDOUT delimiter '|' CSV header""",
+ "file_id": f"""{table['table_source']}_{table['file_id']}""",}}
for
table in json.loads(iris[0][0][0])]
def pg_push_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(POSTGRES_CONN_ID)
pg_hook.copy_expert(copy_sql, filename=f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/""" + (
f"""{yesterday_nodash}/{yesterday_nodash}.""" if Variable.get(
"WITH_DATE") == 'Y' else '') + f"""{file_id}.csv""")
- with DAG("APJ_1_LOS",
+ with DAG("APJ_1_los",
start_date=datetime(2021, 1, 1),
- schedule_interval='10 0 * * *',
+ schedule=cron_tab,
catchup=False,
concurrency=3) as dag:
begin = PythonOperator(
task_id=f"Begin",
python_callable=_start,
@@ -150,13 +165,6 @@ with DAG("APJ_1_LOS",
}
)
- check_done_ext = ShortCircuitOperator(
- task_id="check_done_ext",
- provide_context=True,
- python_callable=check_done_ext,
- op_kwargs={},
- )
history_start = PostgresOperator(
sql=f"""INSERT INTO ds_conf."Datasource_history"
(source, start_time, finish_time, status, env, date_of_data) VALUES
@@ -165,6 +173,13 @@ with DAG("APJ_1_LOS",
postgres_conn_id=POSTGRES_CONN_ID,
)
+ check_done_ext = ShortCircuitOperator(
+ task_id="check_done_ext",
+ provide_context=True,
+ python_callable=check_done_ext,
+ op_kwargs={},
+ )
create_folder = BashOperator(
task_id='create_folder',
bash_command=f"""mkdir -p {Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}"""
@@ -201,13 +216,13 @@ with DAG("APJ_1_LOS",
# )
pg_drop_schema = PostgresOperator(
- sql= f"""drop schema IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
+ sql=f"""drop schema IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
task_id="pg_drop_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_create_schema = PostgresOperator(
- sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
+ sql=f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
task_id="pg_create_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
@@ -252,8 +267,22 @@ with DAG("APJ_1_LOS",
ds_ext_done = BashOperator(
task_id="ds_ext_done",
bash_command=f"""
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
+ ds_to_history = PostgresOperator(
+ sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
+ task_id="ds_to_history",
+ postgres_conn_id=POSTGRES_CONN_ID,
+ )
+ delete_before = BashOperator(
+ task_id="delete_before",
+ bash_command=f"""
+ rm -r /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_lusa};
+ """,
+ )
history_finish = PostgresOperator(
@@ -267,6 +296,4 @@ with DAG("APJ_1_LOS",
)
- begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> history_finish
+ begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> delete_before >> history_finish
\ No newline at end of file
@@ -258,6 +258,12 @@ with DAG("APJ_1_mgate",
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
+ ds_to_history = PostgresOperator(
+ sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
+ task_id="ds_to_history",
+ postgres_conn_id=POSTGRES_CONN_ID,
+ )
delete_before = BashOperator(
task_id="delete_before",
@@ -278,4 +284,4 @@ with DAG("APJ_1_mgate",
- begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> delete_before >> history_finish
+ begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> delete_before >> history_finish
\ No newline at end of file
import json, os
from datetime import date
import calendar
from airflow import DAG
from airflow.operators.python import PythonOperator, BranchPythonOperator, ShortCircuitOperator
from airflow.operators.bash import BashOperator
from datetime import datetime, timedelta
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.models import Variable
from airflow.providers.postgres.operators.postgres import PostgresOperator
# from acme.operators.mssql_import_operator import MsSqlImportOperator
from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.utils.trigger_rule import TriggerRule
from airflow.models.xcom import XCom
from airflow.decorators import task
from airflow.hooks.postgres_hook import PostgresHook
from airflow import XComArg
yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d') if Variable.get(
"DATE_OF_DATA") == 'today' else Variable.get("DATE_OF_DATA")
yesterday_strip = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%Y-%m-%d')
yesterday_lusa = (datetime.strptime(yesterday_nodash, '%Y%m%d') - timedelta(1)).strftime('%Y%m%d')
d = f"""{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
cron_tab = None if Variable.get("CRON_JOB") == 'manual' else Variable.get("CRON_JOB")
today = d[:]
POSTGRES_CONN_ID = Variable.get("DS_DB")
POSTGRES_SCHEMA = 'ds_opics' + Variable.get("ENV_T24")
DS_CONN_ID = Variable.get("DS_OPICS")
DS_CONN_ID_INTER = Variable.get("DS_OPICS_INTER")
DS_FOLDER = 'opics'
DS_DB = 'opics'
DS_SCHEMA = 'opics'
DS_CREATE_TABLE = ''
# def create_ddl(schema,column):
# return f"""SELECT 'CREATE TABLE {POSTGRES_SCHEMA}.opics_' + lower(table_name) + ' ( ' +
# STUFF(
# (
# SELECT ', ' + lower(column_name) + ' ' + replace(case when DATA_TYPE = 'char' then DATA_TYPE + '(' + CONVERT(varchar(10),CHARACTER_MAXIMUM_LENGTH)+ ')' else DATA_TYPE end, 'datetime','timestamp')
# FROM information_schema.columns c2
# WHERE c2.table_schema = c.table_schema
# AND c2.table_name = c.table_name
# ORDER BY ordinal_position
# FOR XML PATH(''), TYPE
# ).value('.', 'NVARCHAR(MAX)'), 1, 2, ''
# ) + ' );' AS create_table_statement
# FROM information_schema.columns c
# WHERE table_schema = 'dbo'
# AND table_name in ({column})
# GROUP BY table_schema, table_name;"""
def create_ddl(schema,column):
return f"""SELECT 'CREATE TABLE {POSTGRES_SCHEMA}.opics_' + lower(table_name) + ' ( ' +
STUFF(
(
SELECT ', ' + lower(column_name) + ' text'
FROM information_schema.columns c2
WHERE c2.table_schema = c.table_schema
AND c2.table_name = c.table_name
ORDER BY ordinal_position
FOR XML PATH(''), TYPE
).value('.', 'NVARCHAR(MAX)'), 1, 2, ''
) + ' );' AS create_table_statement
FROM information_schema.columns c
WHERE table_schema = 'dbo'
AND table_name in ({column})
GROUP BY table_schema, table_name;"""
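# Note: create_ddl() builds one CREATE TABLE statement per MSSQL source table, landing every
# column as text under {POSTGRES_SCHEMA} with an 'opics_' prefix. Illustrative output for a
# hypothetical table 'FXDEAL' with columns DEALNO and CUST (names are examples, not from this commit):
#   CREATE TABLE ds_opics<env>.opics_fxdeal ( dealno text, cust text );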
def _start():
print("Start :: Extractor ")
def check_done_ext(**kwargs):
from random import randint
# number = randint(0, 10)
if os.path.isfile(f'''{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}/done_ext.csv'''):
print("A")
return False
else:
print("B")
# STOP DAG
return True
def ds_list_extractor():
sql_stmt = f"""select * from ds_conf.ds_extractor_list_extractor('opics_db', '{yesterday_strip}');"""
pg_hook = PostgresHook(
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_conn = pg_hook.get_conn()
cursor = pg_conn.cursor()
cursor.execute(sql_stmt)
files = cursor.fetchall()
return files
def ds_get_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{
# "bash_command": f"""which bcp"""
"bash_command": f"""bcp { '"'+table['query_out']+'" queryout ' if table['file_type'] == 'query' else table['table_source']+'.dbo.'+table['file_id'].replace(".", "_")+' out'} {Variable.get("LOCAL_PATH")}{DS_FOLDER}/""" + (
f"""{yesterday_nodash}""" if Variable.get(
"WITH_DATE") == 'Y' else '') + f"""/{table['file_id'].replace(".", "_")}.csv -c -t'{table['delimiter']}' -S172.19.3.80,1433 -Udwh_user -PJtrust@123 -u""",
# "file_id": table['file_id']
}
for
table in json.loads(iris[0][0][0])]
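# Note: ds_get_syntax() maps each extractor entry to a bcp export command: 'query' entries use
# "queryout" with the configured query, other entries export <table_source>.dbo.<file_id> via "out",
# writing a delimiter-separated CSV under {LOCAL_PATH}{DS_FOLDER}/<date>/. The mapped
# ds_table_to_csv BashOperator below expands over this list.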
def ds_get_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(DS_CONN_ID)
pg_hook.copy_expert(copy_sql, filename=f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/""" + (
f"""{yesterday_nodash}/{yesterday_nodash}.""" if Variable.get(
"WITH_DATE") == 'Y' else '') + f"""{file_id}.csv""")
#
# def ds_drop_syntax(ti):
# iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
# if not iris:
# raise Exception('No data.')
# return [{
# "sql": f"""drop table ds_ebanking.{table['file_id'].replace(".", "_")} CASCADE;"""}
# for
# table in json.loads(iris[0][0][0])]
def ds_get_ddl(ti):
global DS_CREATE_TABLE
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
column = []
for table in json.loads(iris[0][0][0]):
column.append(f"""'{table['file_id']}'""")
print(create_ddl('dbo', ','.join(column)))
sql_stmt = create_ddl('dbo', ','.join(column))
mssql_hook = MsSqlHook(
mssql_conn_id=DS_CONN_ID,
)
ms_conn = mssql_hook.get_conn()
cursor = ms_conn.cursor()
cursor.execute(sql_stmt)
x = cursor.fetchall()
print(x)
column_i = []
for table_i in json.loads(iris[0][0][0]):
column_i.append(f"""'{table_i['file_id']}'""")
print(create_ddl('dbo', ','.join(column_i)))
sql_stmt_i = create_ddl('dbo', ','.join(column_i))
mssql_hook_i = MsSqlHook(
mssql_conn_id=DS_CONN_ID_INTER,
)
ms_conn_i = mssql_hook_i.get_conn()
cursor_i = ms_conn_i.cursor()
cursor_i.execute(sql_stmt_i)
x_i = cursor_i.fetchall()
column_q = []
for table_q in json.loads(iris[0][0][0]):
print(table_q['query_table'])
if table_q['query_table'] is not None:
column_q.append([f"""{table_q['query_table']}"""])
return x + x_i + column_q
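# Note: ds_get_ddl() collects CREATE TABLE statements from both MSSQL connections
# (DS_CONN_ID and DS_CONN_ID_INTER) and appends any predefined 'query_table' DDL,
# returning one combined list that pg_ddl_syntax() unpacks into {"sql": ...} kwargs
# for the mapped pg_create_table task.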
def pg_ddl_syntax(ti):
colus = []
print(ti.xcom_pull(task_ids=['ds_get_ddl'][0]))
for table in ti.xcom_pull(task_ids=['ds_get_ddl'][0]):
colus.append(f"""{table[0]}""")
print(colus)
return [{"sql": colus}]
def pg_push_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
"copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_name'].replace(".", "_").replace(table['table_source']+'_', "")} from STDOUT delimiter '|' CSV header""",
"file_id": table['file_id']}}
for
table in json.loads(iris[0][0][0])]
def pg_push_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(POSTGRES_CONN_ID)
pg_hook.copy_expert(copy_sql, filename=f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/""" + (
f"""{yesterday_nodash}/""" if Variable.get(
"WITH_DATE") == 'Y' else '') + f"""{file_id}.csv""")
with DAG("APJ_1_opics_db",
start_date=datetime(2021, 1, 1),
schedule_interval=cron_tab,
catchup=False,
concurrency=3) as dag:
begin = PythonOperator(
task_id=f"Begin",
python_callable=_start,
op_kwargs={
}
)
check_done_ext = ShortCircuitOperator(
task_id="check_done_ext",
provide_context=True,
python_callable=check_done_ext,
op_kwargs={},
)
history_start = PostgresOperator(
sql=f"""INSERT INTO ds_conf."Datasource_history"
(source, start_time, finish_time, status, env, date_of_data) VALUES
('{POSTGRES_SCHEMA}'::varchar(255), '{today}'::timestamp with time zone, '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone, 'ONPROCESS'::varchar(100), '', '{yesterday_nodash}');""",
task_id="history_start",
postgres_conn_id=POSTGRES_CONN_ID,
)
create_folder = BashOperator(
task_id='create_folder',
bash_command=f"""mkdir -p {Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}"""
)
ds_list_extractor = PythonOperator(
task_id='ds_list_extractor',
python_callable=ds_list_extractor,
do_xcom_push=True
)
ds_get_syntax = PythonOperator(
task_id='ds_syntax_get',
python_callable=ds_get_syntax
)
ds_table_to_csv = BashOperator.partial(
task_id="ds_table_to_csv",
).expand_kwargs(
XComArg(ds_get_syntax),
)
# ds_table_to_csv = PythonOperator.partial(
# task_id="ds_table_to_csv",
# python_callable=ds_get_csv,
# dag=dag
# ).expand_kwargs(
# XComArg(ds_get_syntax),
# )
ds_get_ddl = PythonOperator(
task_id='ds_get_ddl',
python_callable=ds_get_ddl,
do_xcom_push=True
)
# ds_drop_syntax = PythonOperator(
# task_id='ds_drop_syntax',
# python_callable=ds_drop_syntax
# )
pg_drop_schema = PostgresOperator(
sql= f"""drop schema IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
task_id="pg_drop_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_create_schema = PostgresOperator(
sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
task_id="pg_create_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_ddl_syntax = PythonOperator(
task_id='pg_ddl_syntax',
python_callable=pg_ddl_syntax
)
pg_create_table = PostgresOperator.partial(
task_id="pg_create_table",
postgres_conn_id=POSTGRES_CONN_ID,
).expand_kwargs(
XComArg(pg_ddl_syntax),
)
pg_push_syntax = PythonOperator(
task_id='pg_syntax_push',
python_callable=pg_push_syntax
)
pg_csv_to_table = PythonOperator.partial(
task_id="pg_csv_to_table",
python_callable=pg_push_csv,
dag=dag
).expand_kwargs(
XComArg(pg_push_syntax),
)
set_access_schemma = PostgresOperator(
sql=f"""GRANT USAGE ON SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_schemma",
postgres_conn_id=POSTGRES_CONN_ID,
)
set_access_all_table = PostgresOperator(
sql=f"""GRANT SELECT ON ALL TABLES IN SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_all_table",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_ext_done = BashOperator(
task_id="ds_ext_done",
bash_command=f"""
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
ds_to_history = PostgresOperator(
sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
task_id="ds_to_history",
postgres_conn_id=POSTGRES_CONN_ID,
)
delete_before = BashOperator(
task_id="delete_before",
bash_command=f"""
rm -r /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_lusa};
""",
)
history_finish = PostgresOperator(
sql=f"""
UPDATE ds_conf."Datasource_history"
SET status = 'DONE', finish_time = '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone
WHERE source = '{POSTGRES_SCHEMA}' and status = 'ONPROCESS';
""",
task_id="history_finish",
postgres_conn_id=POSTGRES_CONN_ID,
)
begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> delete_before >> history_finish
import json, os
import csv
from datetime import date
import calendar
from airflow import DAG
from airflow.operators.python import PythonOperator, BranchPythonOperator, ShortCircuitOperator
from airflow.operators.bash import BashOperator
from datetime import datetime, timedelta
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.models import Variable
from airflow.providers.postgres.operators.postgres import PostgresOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.utils.trigger_rule import TriggerRule
from airflow.models.xcom import XCom
from airflow.decorators import task
from airflow.hooks.postgres_hook import PostgresHook
from airflow import XComArg
# yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d')
yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d') if Variable.get(
"DATE_OF_DATA") == 'today' else Variable.get("DATE_OF_DATA")
yesterday_strip = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%Y-%m-%d')
d = f"""{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
today = d[:]
POSTGRES_CONN_ID = Variable.get("DS_DB")
POSTGRES_ENV = Variable.get("ENV_T24")
POSTGRES_SCHEMA = 'ds_opics' + Variable.get("ENV_T24")
# POSTGRES_SCHEMA = 'ds_switching'
DS_FOLDER = 'opics'
DS_DB = 'opics'
DS_SCHEMA = 'opics'
DS_CREATE_TABLE = ''
def _start():
print("Start :: Extractor ")
def check_done_ext(**kwargs):
from random import randint
# number = randint(0, 10)
print(f'''{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/done.csv''')
if os.path.isfile(f'''{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}/done_ext.csv'''):
print("A")
return False
else:
print("B")
# STOP DAG
return True
def ds_list_extractor():
sql_stmt = f"""select * from ds_conf.ds_extractor_list_extractor('opics', '{yesterday_strip}');"""
pg_hook = PostgresHook(
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_conn = pg_hook.get_conn()
cursor = pg_conn.cursor()
cursor.execute(sql_stmt)
files = cursor.fetchall()
return files
def ds_push_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
"copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_name']} from STDOUT delimiter '{table['delimiter']}' CSV HEADER quote E'"'""",
"file_id": f"""/{table['file_id']}"""}}
for
table in json.loads(iris[0][0][0])]
def pg_ddl_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = []
for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{table['file_id']}""") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
field_names_list = reader.__next__()
arr.append({"sql": f"""drop table if exists {POSTGRES_SCHEMA}.{table['table_name']} cascade; create table {POSTGRES_SCHEMA}.{table['table_name']} ({' text, '.join([w.replace('.', '_').replace(' ', '_').replace('-', '_').replace("'", '').replace("(",'').replace(")",'').replace('/','or').lower() for w in field_names_list])} text);"""})
return arr
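# Note: pg_ddl_syntax() derives each table definition from the first (header) row of its CSV:
# column names are sanitized (dots/spaces/dashes to underscores, quotes and parentheses stripped,
# '/' replaced with 'or', lower-cased) and every column is created as text after a drop ... cascade.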
def ds_push_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(POSTGRES_CONN_ID)
pg_hook.copy_expert(copy_sql, filename=f"""/opt/airflow/dags/DFE/{DS_FOLDER}/{file_id}""")
with DAG("APJ_1_opics_jtrust",
start_date=datetime(2021, 1, 1),
schedule_interval=None,
catchup=False,
concurrency=3) as dag:
begin = PythonOperator(
task_id=f"Begin",
python_callable=_start,
op_kwargs={
}
)
check_done_ext = ShortCircuitOperator(
task_id="check_done_ext",
provide_context=True,
python_callable=check_done_ext,
op_kwargs={},
)
history_start = PostgresOperator(
sql=f"""INSERT INTO ds_conf."Datasource_history"
(source, start_time, finish_time, status, env, date_of_data) VALUES
('{POSTGRES_SCHEMA}'::varchar(255), '{today}'::timestamp with time zone, '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone, 'ONPROCESS'::varchar(100), '', '{yesterday_nodash}');""",
task_id="history_start",
postgres_conn_id=POSTGRES_CONN_ID,
)
drop_schema = PostgresOperator(
sql=f"""drop schema if exists {POSTGRES_SCHEMA} cascade;""",
task_id="drop_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
create_schema = PostgresOperator(
sql=f"""create schema if not exists {POSTGRES_SCHEMA};""",
task_id="create_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_list_extractor = PythonOperator(
task_id='ds_list_extractor',
python_callable=ds_list_extractor,
do_xcom_push=True
)
# pg_drop_schema = PostgresOperator(
# sql= f"""DROP TABLE IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
# task_id="pg_drop_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
#
# pg_create_schema = PostgresOperator(
# sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
# task_id="pg_create_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
pg_ddl_syntax = PythonOperator(
task_id='pg_ddl_syntax',
python_callable=pg_ddl_syntax
)
pg_create_table = PostgresOperator.partial(
task_id="pg_create_table",
postgres_conn_id=POSTGRES_CONN_ID,
).expand_kwargs(
XComArg(pg_ddl_syntax),
)
# ds_truncate_syntax = PythonOperator(
# task_id='ds_truncate_syntax',
# python_callable=ds_truncate_syntax
# )
#
# ds_truncate = PostgresOperator.partial(
# task_id="ds_truncate",
# postgres_conn_id=POSTGRES_CONN_ID,
# ).expand_kwargs(
# XComArg(ds_truncate_syntax)
# )
#
ds_push_syntax = PythonOperator(
task_id='ds_syntax_push',
python_callable=ds_push_syntax
)
ds_csv_to_table = PythonOperator.partial(
task_id="ds_csv_to_table",
python_callable=ds_push_csv,
dag=dag
).expand_kwargs(
XComArg(ds_push_syntax),
)
set_access_schemma = PostgresOperator(
sql=f"""GRANT USAGE ON SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_schemma",
postgres_conn_id=POSTGRES_CONN_ID,
)
set_access_all_table = PostgresOperator(
sql=f"""GRANT SELECT ON ALL TABLES IN SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_all_table",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_ext_done = BashOperator(
task_id="ds_ext_done",
bash_command=f"""
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
history_finish = PostgresOperator(
sql=f"""
UPDATE ds_conf."Datasource_history"
SET status = 'DONE', finish_time = '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone
WHERE source = '{POSTGRES_SCHEMA}' and status = 'ONPROCESS';
""",
task_id="history_finish",
postgres_conn_id=POSTGRES_CONN_ID,
)
begin >> check_done_ext >> history_start >> drop_schema >> create_schema >> ds_list_extractor >> pg_ddl_syntax >> pg_create_table >> ds_push_syntax >> ds_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> history_finish
import json, os
import csv
from datetime import date
import calendar
from airflow import DAG
from airflow.operators.python import PythonOperator, BranchPythonOperator, ShortCircuitOperator
from airflow.operators.bash import BashOperator
from datetime import datetime, timedelta
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.models import Variable
from airflow.providers.postgres.operators.postgres import PostgresOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.utils.trigger_rule import TriggerRule
from airflow.models.xcom import XCom
from airflow.decorators import task
from airflow.hooks.postgres_hook import PostgresHook
from airflow import XComArg
# yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d')
yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d') if Variable.get(
"DATE_OF_DATA") == 'today' else Variable.get("DATE_OF_DATA")
yesterday_strip = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%Y-%m-%d')
d = f"""{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
today = d[:]
ddmmyyyy = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%d%m%Y')
POSTGRES_CONN_ID = Variable.get("DS_DB")
POSTGRES_ENV = Variable.get("ENV_T24")
POSTGRES_SCHEMA = 'ds_regla' + Variable.get("ENV_T24")
# POSTGRES_SCHEMA = 'ds_switching'
DS_FOLDER = 'regla'
DS_DB = 'regla'
DS_SCHEMA = 'regla'
DS_CREATE_TABLE = ''
def _start():
print("Start :: Extractor ")
def check_done_ext(**kwargs):
from random import randint
# number = randint(0, 10)
print(f'''{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/done.csv''')
if os.path.isfile(f'''{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}/done_ext.csv'''):
print("A")
return False
else:
print("B")
# STOP DAG
return True
def ds_list_extractor():
sql_stmt = f"""select * from ds_conf.ds_extractor_list_extractor('regla', '{yesterday_strip}');"""
pg_hook = PostgresHook(
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_conn = pg_hook.get_conn()
cursor = pg_conn.cursor()
cursor.execute(sql_stmt)
files = cursor.fetchall()
return files
def ds_push_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
"copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_name']} from STDOUT delimiter '{table['delimiter']}' CSV HEADER quote E'\b'""",
"file_id": f"""/{table['file_id']}"""}}
for
table in json.loads(iris[0][0][0])]
def pg_ddl_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = []
for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{table['file_id']}""") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
for row in reader:
# adding the first row
field_names_list = row
# breaking the loop after the
# first iteration itself
break
arr.append({"sql": f"""drop table if exists {POSTGRES_SCHEMA}.{table['table_name']} cascade; create table {POSTGRES_SCHEMA}.{table['table_name']} ({' text, '.join([w.replace('.', '_').replace(' ', '_').replace('-', '_').replace("'", '').replace("(",'').replace(")",'').replace('/','or').lower() for w in field_names_list])} text);"""})
return arr
def ds_push_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(POSTGRES_CONN_ID)
pg_hook.copy_expert(copy_sql, filename=f"""/opt/airflow/dags/DFE/{DS_FOLDER}/{file_id}""")
def csv_clean_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = []
for table in json.loads(iris[0][0][0]):
# with open(f"""{Variable.get("LOCAL_PATH")}regla/{table['file_id']}""", encoding = "ISO-8859-1") as csvFile:
# reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
# field_names_list = reader.__next__()
arr.append({"bash_command": f"""echo 'OK' """ if table['sed_command'] == 'nil' else (f"""{Variable.get("LOCAL_PATH")}regla/{table['file_id']} > {Variable.get("LOCAL_PATH")}regla/{table['file_id']}_bk && mv {Variable.get("LOCAL_PATH")}regla/{table['file_id']}_bk {Variable.get("LOCAL_PATH")}regla/{table['file_id']} && """.join(table['sed_command'].split('|;|;|')) + f""" {Variable.get("LOCAL_PATH")}regla/{table['file_id']} > {Variable.get("LOCAL_PATH")}regla/{table['file_id']}_bk && mv {Variable.get("LOCAL_PATH")}regla/{table['file_id']}_bk {Variable.get("LOCAL_PATH")}regla/{table['file_id']}""")})
return arr
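# Note: csv_clean_syntax() builds one bash command per file: when sed_command is 'nil' it only
# echoes 'OK'; otherwise it chains the '|;|;|'-separated sed expressions, each writing to a _bk
# copy and moving it back over the original CSV before the load step.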
with DAG("APJ_1_regla_jtrust",
start_date=datetime(2021, 1, 1),
schedule_interval=None,
catchup=False,
concurrency=3) as dag:
begin = PythonOperator(
task_id=f"Begin",
python_callable=_start,
op_kwargs={
}
)
check_done_ext = ShortCircuitOperator(
task_id="check_done_ext",
provide_context=True,
python_callable=check_done_ext,
op_kwargs={},
)
create_folder = BashOperator(
task_id='create_folder',
bash_command=f"""mkdir -p {Variable.get("LOCAL_PATH")}{DS_FOLDER}/{ddmmyyyy}"""
)
sftp_input = BashOperator(
task_id="sftp_input",
bash_command=f"""sshpass -p '{Variable.get("SFTP_REGLA_PASSWORD")}' sftp -o StrictHostKeyChecking=no -r -P 2222 {Variable.get("SFTP_REGLA_USER")}@{Variable.get("SFTP_REGLA_HOST")}:input/*/*{ddmmyyyy}* {Variable.get("LOCAL_PATH")}regla/{ddmmyyyy}/""",
)
sftp_output = BashOperator(
task_id="sftp_output",
bash_command=f"""sshpass -p '{Variable.get("SFTP_REGLA_PASSWORD")}' sftp -o StrictHostKeyChecking=no -r -P 2222 {Variable.get("SFTP_REGLA_USER")}@{Variable.get("SFTP_REGLA_HOST")}:output/*/*{ddmmyyyy}* {Variable.get("LOCAL_PATH")}regla/{ddmmyyyy}/""",
)
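# Note: sftp_input/sftp_output pull the day's input/ and output/ files (matched by the ddmmyyyy
# pattern) from the REGLA SFTP server into {LOCAL_PATH}regla/{ddmmyyyy}/ before the tables are built.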
history_start = PostgresOperator(
sql=f"""INSERT INTO ds_conf."Datasource_history"
(source, start_time, finish_time, status, env, date_of_data) VALUES
('{POSTGRES_SCHEMA}'::varchar(255), '{today}'::timestamp with time zone, '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone, 'ONPROCESS'::varchar(100), '', '{yesterday_nodash}');""",
task_id="history_start",
postgres_conn_id=POSTGRES_CONN_ID,
)
drop_schema = PostgresOperator(
sql=f"""drop schema if exists {POSTGRES_SCHEMA} cascade;""",
task_id="drop_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
create_schema = PostgresOperator(
sql=f"""create schema if not exists {POSTGRES_SCHEMA};""",
task_id="create_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_list_extractor = PythonOperator(
task_id='ds_list_extractor',
python_callable=ds_list_extractor,
do_xcom_push=True
)
csv_clean_syntax = PythonOperator(
task_id='csv_clean_syntax',
python_callable=csv_clean_syntax
)
clean_csv = BashOperator.partial(
task_id="clean_csv",
).expand_kwargs(
XComArg(csv_clean_syntax),
)
# pg_drop_schema = PostgresOperator(
# sql= f"""DROP TABLE IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
# task_id="pg_drop_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
#
# pg_create_schema = PostgresOperator(
# sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
# task_id="pg_create_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
pg_ddl_syntax = PythonOperator(
task_id='pg_ddl_syntax',
python_callable=pg_ddl_syntax
)
pg_create_table = PostgresOperator.partial(
task_id="pg_create_table",
postgres_conn_id=POSTGRES_CONN_ID,
).expand_kwargs(
XComArg(pg_ddl_syntax),
)
# ds_truncate_syntax = PythonOperator(
# task_id='ds_truncate_syntax',
# python_callable=ds_truncate_syntax
# )
#
# ds_truncate = PostgresOperator.partial(
# task_id="ds_truncate",
# postgres_conn_id=POSTGRES_CONN_ID,
# ).expand_kwargs(
# XComArg(ds_truncate_syntax)
# )
#
ds_push_syntax = PythonOperator(
task_id='ds_syntax_push',
python_callable=ds_push_syntax
)
ds_csv_to_table = PythonOperator.partial(
task_id="ds_csv_to_table",
python_callable=ds_push_csv,
dag=dag
).expand_kwargs(
XComArg(ds_push_syntax),
)
set_access_schemma = PostgresOperator(
sql=f"""GRANT USAGE ON SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_schemma",
postgres_conn_id=POSTGRES_CONN_ID,
)
set_access_all_table = PostgresOperator(
sql=f"""GRANT SELECT ON ALL TABLES IN SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_all_table",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_ext_done = BashOperator(
task_id="ds_ext_done",
bash_command=f"""
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{ddmmyyyy}/done_ext.csv;
""",
)
ds_to_history = PostgresOperator(
sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
task_id="ds_to_history",
postgres_conn_id=POSTGRES_CONN_ID,
)
history_finish = PostgresOperator(
sql=f"""
UPDATE ds_conf."Datasource_history"
SET status = 'DONE', finish_time = '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone
WHERE source = '{POSTGRES_SCHEMA}' and status = 'ONPROCESS';
""",
task_id="history_finish",
postgres_conn_id=POSTGRES_CONN_ID,
)
begin >> check_done_ext >> create_folder >> sftp_input >> sftp_output >> history_start >> drop_schema >> create_schema >> ds_list_extractor >> csv_clean_syntax >> clean_csv >> pg_ddl_syntax >> pg_create_table >> ds_push_syntax >> ds_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> history_finish
import json, os
import csv
from datetime import date
import calendar
from airflow import DAG
from airflow.operators.python import PythonOperator, BranchPythonOperator, ShortCircuitOperator
from airflow.operators.bash import BashOperator
from datetime import datetime, timedelta
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.models import Variable
from airflow.providers.postgres.operators.postgres import PostgresOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.utils.trigger_rule import TriggerRule
from airflow.models.xcom import XCom
from airflow.decorators import task
from airflow.hooks.postgres_hook import PostgresHook
from airflow import XComArg
# yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d')
yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d') if Variable.get(
"DATE_OF_DATA") == 'today' else Variable.get("DATE_OF_DATA")
yesterday_strip = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%Y-%m-%d')
d = f"""{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
today = d[:]
POSTGRES_CONN_ID = Variable.get("DS_DB")
POSTGRES_ENV = Variable.get("ENV_T24")
POSTGRES_SCHEMA = 'ds_regla' + Variable.get("ENV_T24")
# POSTGRES_SCHEMA = 'ds_switching'
DS_FOLDER = 'regla'
DS_DB = 'regla'
DS_SCHEMA = 'regla'
DS_CREATE_TABLE = ''
def _start():
print("Start :: Extractor ")
def check_done_ext(**kwargs):
from random import randint
# number = randint(0, 10)
print(f'''{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/done.csv''')
if os.path.isfile(f'''{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}/done_ext.csv'''):
print("A")
return False
else:
print("B")
# STOP DAG
return True
def ds_list_extractor():
sql_stmt = f"""select * from ds_conf.ds_extractor_list_extractor('regla', '{yesterday_strip}');"""
pg_hook = PostgresHook(
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_conn = pg_hook.get_conn()
cursor = pg_conn.cursor()
cursor.execute(sql_stmt)
files = cursor.fetchall()
return files
def ds_push_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
"copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_name']} from STDOUT delimiter '{table['delimiter']}' CSV HEADER quote E'\b'""",
"file_id": f"""/{table['file_id']}"""}}
for
table in json.loads(iris[0][0][0])]
def pg_ddl_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = []
for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{table['file_id']}""") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
field_names_list = reader.__next__()
arr.append({"sql": f"""drop table if exists {POSTGRES_SCHEMA}.{table['table_name']} cascade; create table {POSTGRES_SCHEMA}.{table['table_name']} ({' text, '.join([w.replace('.', '_').replace(' ', '_').replace('-', '_').replace("'", '').replace("(",'').replace(")",'').replace('/','or').lower() for w in field_names_list])} text);"""})
return arr
def ds_push_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(POSTGRES_CONN_ID)
pg_hook.copy_expert(copy_sql, filename=f"""/opt/airflow/dags/DFE/{DS_FOLDER}/{file_id}""")
with DAG("APJ_1_regla_jtrust",
start_date=datetime(2021, 1, 1),
schedule_interval=None,
catchup=False,
concurrency=3) as dag:
begin = PythonOperator(
task_id=f"Begin",
python_callable=_start,
op_kwargs={
}
)
check_done_ext = ShortCircuitOperator(
task_id="check_done_ext",
provide_context=True,
python_callable=check_done_ext,
op_kwargs={},
)
history_start = PostgresOperator(
sql=f"""INSERT INTO ds_conf."Datasource_history"
(source, start_time, finish_time, status, env, date_of_data) VALUES
('{POSTGRES_SCHEMA}'::varchar(255), '{today}'::timestamp with time zone, '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone, 'ONPROCESS'::varchar(100), '', '{yesterday_nodash}');""",
task_id="history_start",
postgres_conn_id=POSTGRES_CONN_ID,
)
drop_schema = PostgresOperator(
sql=f"""drop schema if exists {POSTGRES_SCHEMA} cascade;""",
task_id="drop_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
create_schema = PostgresOperator(
sql=f"""create schema if not exists {POSTGRES_SCHEMA};""",
task_id="create_schema",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_list_extractor = PythonOperator(
task_id='ds_list_extractor',
python_callable=ds_list_extractor,
do_xcom_push=True
)
csv_clean_syntax = PythonOperator(
task_id='csv_clean_syntax',
python_callable=csv_clean_syntax
)
clean_csv = BashOperator.partial(
task_id="clean_csv",
).expand_kwargs(
XComArg(csv_clean_syntax),
)
# pg_drop_schema = PostgresOperator(
# sql= f"""DROP TABLE IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
# task_id="pg_drop_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
#
# pg_create_schema = PostgresOperator(
# sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
# task_id="pg_create_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
pg_ddl_syntax = PythonOperator(
task_id='pg_ddl_syntax',
python_callable=pg_ddl_syntax
)
pg_create_table = PostgresOperator.partial(
task_id="pg_create_table",
postgres_conn_id=POSTGRES_CONN_ID,
).expand_kwargs(
XComArg(pg_ddl_syntax),
)
# ds_truncate_syntax = PythonOperator(
# task_id='ds_truncate_syntax',
# python_callable=ds_truncate_syntax
# )
#
# ds_truncate = PostgresOperator.partial(
# task_id="ds_truncate",
# postgres_conn_id=POSTGRES_CONN_ID,
# ).expand_kwargs(
# XComArg(ds_truncate_syntax)
# )
#
ds_push_syntax = PythonOperator(
task_id='ds_syntax_push',
python_callable=ds_push_syntax
)
ds_csv_to_table = PythonOperator.partial(
task_id="ds_csv_to_table",
python_callable=ds_push_csv,
dag=dag
).expand_kwargs(
XComArg(ds_push_syntax),
)
set_access_schemma = PostgresOperator(
sql=f"""GRANT USAGE ON SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_schemma",
postgres_conn_id=POSTGRES_CONN_ID,
)
set_access_all_table = PostgresOperator(
sql=f"""GRANT SELECT ON ALL TABLES IN SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_all_table",
postgres_conn_id=POSTGRES_CONN_ID,
)
ds_ext_done = BashOperator(
task_id="ds_ext_done",
bash_command=f"""
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
history_finish = PostgresOperator(
sql=f"""
UPDATE ds_conf."Datasource_history"
SET status = 'DONE', finish_time = '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone
WHERE source = '{POSTGRES_SCHEMA}' and status = 'ONPROCESS';
""",
task_id="history_finish",
postgres_conn_id=POSTGRES_CONN_ID,
)
begin >> check_done_ext >> history_start >> drop_schema >> create_schema >> ds_list_extractor >> pg_ddl_syntax >> pg_create_table >> ds_push_syntax >> ds_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> history_finish
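# The run is linear apart from the mapped tasks: the schema is rebuilt from scratch, tables are
# recreated from the CSV headers, the files are COPY-loaded, read access is granted, and the
# done_ext.csv marker plus the Datasource_history update close out the run.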
import json, os
import csv
from datetime import date
import calendar
from airflow import DAG
from airflow.operators.python import PythonOperator, BranchPythonOperator, ShortCircuitOperator
from airflow.operators.bash import BashOperator
from datetime import datetime, timedelta
from airflow.providers.sftp.operators.sftp import SFTPOperator
from airflow.models import Variable
from airflow.providers.postgres.operators.postgres import PostgresOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.utils.trigger_rule import TriggerRule
from airflow.models.xcom import XCom
from airflow.decorators import task
from airflow.hooks.postgres_hook import PostgresHook
from airflow import XComArg
# yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d')
yesterday_nodash = (datetime.now() - timedelta(1)).strftime('%Y%m%d') if Variable.get(
"DATE_OF_DATA") == 'today' else Variable.get("DATE_OF_DATA")
yesterday_strip = datetime.strptime(yesterday_nodash, '%Y%m%d').strftime('%Y-%m-%d')
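# Date handling: when the DATE_OF_DATA Variable is 'today' the DAG processes yesterday's extract
# (YYYYMMDD); any other value acts as an explicit backfill date in the same format. yesterday_strip
# holds the ISO form (YYYY-MM-DD) used as a SQL parameter.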
d = f"""{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
today = d[:]
POSTGRES_CONN_ID = Variable.get("DS_DB")
POSTGRES_CONN_ID_DM = Variable.get("DS_DB_DM")
POSTGRES_ENV = Variable.get("ENV_T24")
POSTGRES_SCHEMA = 'ds_regla' + Variable.get("ENV_T24")
# POSTGRES_SCHEMA = 'ds_switching'
DS_FOLDER = 'regla'
DS_DB = 'regla'
DS_SCHEMA = 'regla'
DS_CREATE_TABLE = ''
def _start():
print("Start :: Extractor ")
def check_done_ext(**kwargs):
marker_file = f'''{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{yesterday_nodash}/done_ext.csv'''
print(marker_file)
if os.path.isfile(marker_file):
# Marker already exists: this date was extracted before, so stop the DAG here.
return False
else:
# No marker yet: continue with the extraction.
return True
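# ShortCircuitOperator semantics: returning False skips all downstream tasks, so a date that
# already produced done_ext.csv is not re-extracted; returning True lets the pipeline proceed.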
def ds_list_extractor():
sql_stmt = f"""select * from ds_conf.ds_extractor_list_extractor('regla', '{yesterday_strip}');"""
pg_hook = PostgresHook(
postgres_conn_id=POSTGRES_CONN_ID,
)
pg_conn = pg_hook.get_conn()
cursor = pg_conn.cursor()
cursor.execute(sql_stmt)
files = cursor.fetchall()
return files
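# Downstream tasks read the first cell of the first row (json.loads(iris[0][0][0])), which is
# expected to contain a JSON array of per-file descriptors carrying at least file_id, table_name
# and delimiter.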
def ds_push_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
if not iris:
raise Exception('No data.')
return [{"op_kwargs": {
"copy_sql": f"""COPY {POSTGRES_SCHEMA}.{table['table_name']} from STDOUT delimiter '{table['delimiter']}' CSV HEADER quote E'\b'""",
"file_id": f"""/{table['file_id']}"""}}
for
table in json.loads(iris[0][0][0])]
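# Illustration only (hypothetical values): one rendered element might look like
#   {"op_kwargs": {"copy_sql": "COPY ds_regla<ENV>.customers from STDOUT delimiter '|' CSV HEADER quote E'\b'",
#                  "file_id": "/20240101/customers.csv"}}
# Using the backspace character as the quote char effectively disables CSV quoting, so embedded
# double quotes are loaded verbatim.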
def pg_ddl_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = []
for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}{DS_FOLDER}/{table['file_id']}""") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
field_names_list = next(reader)
arr.append({"sql": f"""drop table if exists {POSTGRES_SCHEMA}.{table['table_name']} cascade; create table {POSTGRES_SCHEMA}.{table['table_name']} ({' text, '.join([w.replace('.', '_').replace(' ', '_').replace('-', '_').replace("'", '').replace("(",'').replace(")",'').replace('/','or').lower() for w in field_names_list])} text);"""})
return arr
def ds_push_csv(ti, copy_sql, file_id):
pg_hook = PostgresHook.get_hook(POSTGRES_CONN_ID_DM)
pg_hook.copy_expert(copy_sql, filename=f"""/opt/airflow/dags/DFE/{DS_FOLDER}/{file_id}""")
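# Unlike APJ_1_regla_jtrust above, this loader pushes into the data-mart connection
# (POSTGRES_CONN_ID_DM); the extractor list and the history bookkeeping still use POSTGRES_CONN_ID.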
with DAG("APJ_1_regla_jtrust_dm",
start_date=datetime(2021, 1, 1),
schedule_interval=None,
catchup=False,
concurrency=3) as dag:
begin = PythonOperator(
task_id=f"Begin",
python_callable=_start,
op_kwargs={
}
)
check_done_ext = ShortCircuitOperator(
task_id="check_done_ext",
provide_context=True,
python_callable=check_done_ext,
op_kwargs={},
)
history_start = PostgresOperator(
sql=f"""INSERT INTO ds_conf."Datasource_history"
(source, start_time, finish_time, status, env, date_of_data) VALUES
('{POSTGRES_SCHEMA}'::varchar(255), '{today}'::timestamp with time zone, '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone, 'ONPROCESS'::varchar(100), '', '{yesterday_nodash}');""",
task_id="history_start",
postgres_conn_id=POSTGRES_CONN_ID,
)
drop_schema = PostgresOperator(
sql=f"""drop schema if exists {POSTGRES_SCHEMA} cascade;""",
task_id="drop_schema",
postgres_conn_id=POSTGRES_CONN_ID_DM,
)
create_schema = PostgresOperator(
sql=f"""create schema if not exists {POSTGRES_SCHEMA};""",
task_id="create_schema",
postgres_conn_id=POSTGRES_CONN_ID_DM,
)
ds_list_extractor = PythonOperator(
task_id='ds_list_extractor',
python_callable=ds_list_extractor,
do_xcom_push=True
)
# pg_drop_schema = PostgresOperator(
# sql= f"""DROP TABLE IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
# task_id="pg_drop_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
#
# pg_create_schema = PostgresOperator(
# sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
# task_id="pg_create_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
pg_ddl_syntax = PythonOperator(
task_id='pg_ddl_syntax',
python_callable=pg_ddl_syntax
)
pg_create_table = PostgresOperator.partial(
task_id="pg_create_table",
postgres_conn_id=POSTGRES_CONN_ID_DM,
).expand_kwargs(
XComArg(pg_ddl_syntax),
)
# ds_truncate_syntax = PythonOperator(
# task_id='ds_truncate_syntax',
# python_callable=ds_truncate_syntax
# )
#
# ds_truncate = PostgresOperator.partial(
# task_id="ds_truncate",
# postgres_conn_id=POSTGRES_CONN_ID,
# ).expand_kwargs(
# XComArg(ds_truncate_syntax)
# )
#
ds_push_syntax = PythonOperator(
task_id='ds_syntax_push',
python_callable=ds_push_syntax
)
ds_csv_to_table = PythonOperator.partial(
task_id="ds_csv_to_table",
python_callable=ds_push_csv,
dag=dag
).expand_kwargs(
XComArg(ds_push_syntax),
)
set_access_schemma = PostgresOperator(
sql=f"""GRANT USAGE ON SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_schemma",
postgres_conn_id=POSTGRES_CONN_ID_DM,
)
set_access_all_table = PostgresOperator(
sql=f"""GRANT SELECT ON ALL TABLES IN SCHEMA {POSTGRES_SCHEMA} TO readaccess;""",
task_id="set_access_all_table",
postgres_conn_id=POSTGRES_CONN_ID_DM,
)
ds_ext_done = BashOperator(
task_id="ds_ext_done",
bash_command=f"""
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""",
)
history_finish = PostgresOperator(
sql=f"""
UPDATE ds_conf."Datasource_history"
SET status = 'DONE', finish_time = '{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}'::timestamp with time zone
WHERE source = '{POSTGRES_SCHEMA}' and status = 'ONPROCESS';
""",
task_id="history_finish",
postgres_conn_id=POSTGRES_CONN_ID,
)
begin >> check_done_ext >> history_start >> drop_schema >> create_schema >> ds_list_extractor >> pg_ddl_syntax >> pg_create_table >> ds_push_syntax >> ds_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> history_finish
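# Same linear flow as APJ_1_regla_jtrust, but schema creation, table DDL, COPY loads and the grants
# all run against the data-mart connection, while Datasource_history rows stay on the primary DS
# connection.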
...@@ -32,7 +32,7 @@ POSTGRES_SCHEMA = 'ds_switching' + Variable.get("ENV_T24") ...@@ -32,7 +32,7 @@ POSTGRES_SCHEMA = 'ds_switching' + Variable.get("ENV_T24")
DS_CONN_ID = Variable.get("DS_SWITCHING") DS_CONN_ID = Variable.get("DS_SWITCHING")
DS_FOLDER = 'switching' DS_FOLDER = 'switching'
DS_DB = 'switching' DS_DB = 'switching'
DS_SCHEMA = 'public' DS_SCHEMA = 'iacss'
DS_CREATE_TABLE = '' DS_CREATE_TABLE = ''
def create_ddl(schema,column): def create_ddl(schema,column):
...@@ -255,6 +255,13 @@ with DAG("APJ_1_SWITCHING_IACSS", ...@@ -255,6 +255,13 @@ with DAG("APJ_1_SWITCHING_IACSS",
touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv; touch /opt/airflow/dags/DFE/{DS_FOLDER}/{yesterday_nodash}/done_ext.csv;
""", """,
) )
ds_to_history = PostgresOperator(
sql=f"""select ds_conf.ds_t24_create_table_history_surrounding('{yesterday_strip}', '{POSTGRES_SCHEMA}');""",
task_id="ds_to_history",
postgres_conn_id=POSTGRES_CONN_ID,
)
history_finish = PostgresOperator( history_finish = PostgresOperator(
sql=f""" sql=f"""
...@@ -269,4 +276,4 @@ with DAG("APJ_1_SWITCHING_IACSS", ...@@ -269,4 +276,4 @@ with DAG("APJ_1_SWITCHING_IACSS",
begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> history_finish begin >> check_done_ext >> history_start >> create_folder >> ds_list_extractor >> ds_get_syntax >> ds_table_to_csv >> ds_get_ddl >> pg_drop_schema >> pg_create_schema >> pg_ddl_syntax >> pg_create_table >> pg_push_syntax >> pg_csv_to_table >> set_access_schemma >> set_access_all_table >> ds_ext_done >> ds_to_history >> history_finish
\ No newline at end of file \ No newline at end of file
...@@ -175,8 +175,8 @@ with DAG("APJ_1_T24", ...@@ -175,8 +175,8 @@ with DAG("APJ_1_T24",
sftp_xx = BashOperator( sftp_xx = BashOperator(
task_id="sftp_xx", task_id="sftp_get_data_t24",
bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:DFE/{yesterday_nodash}/ {Variable.get("LOCAL_PATH")}{DS_FOLDER}/""", bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r -P 2222 {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:DFE/{yesterday_nodash}/ {Variable.get("LOCAL_PATH")}{DS_FOLDER}/""",
) )
# ds_get_t24_extractor = SFTPOperator( # ds_get_t24_extractor = SFTPOperator(
......
...@@ -67,20 +67,20 @@ def pg_ddl_syntax(ti): ...@@ -67,20 +67,20 @@ def pg_ddl_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor']) iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = [] arr = []
for table in json.loads(iris[0][0][0]): for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""") as csvFile: with open(f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""", encoding = "ISO-8859-1") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""") reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
field_names_list = reader.__next__() field_names_list = reader.__next__()
arr.append({"sql": f"""drop table if exists {POSTGRES_SCHEMA}.{table['table_name']} cascade; create table {POSTGRES_SCHEMA}.{table['table_name']} ({' text, '.join([w.replace(' ', '_').replace('.', '_').replace('/', '_').replace('(', '_').replace(')', '_').replace('+', '_').replace('___', '_').lower().replace((table['delimiter']+'limit'+table['delimiter']), (table['delimiter']+'limit_reff'+table['delimiter'])) for w in field_names_list])} text);"""}) arr.append({"sql": f"""drop table if exists {POSTGRES_SCHEMA}.{table['table_name']} cascade; create table {POSTGRES_SCHEMA}.{table['table_name']} ({' text, '.join([w.replace(' ', '_').replace('.', '_').replace('/', '_').replace('(', '_').replace(')', '_').replace('+', '_').replace('___', '_').replace('%', '_').lower().replace((table['delimiter']+'limit'+table['delimiter']), (table['delimiter']+'limit_reff'+table['delimiter'])) for w in field_names_list])} text);"""})
return arr return arr
def csv_clean_syntax(ti): def csv_clean_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor']) iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = [] arr = []
for table in json.loads(iris[0][0][0]): for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""") as csvFile: #with open(f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""", encoding = "ISO-8859-1") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""") #reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
field_names_list = reader.__next__() #field_names_list = reader.__next__()
arr.append({"bash_command": f"""echo 'OK' """ if table['sed_command'] == 'nil' else (f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']} > {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk && mv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']} && """.join(table['sed_command'].split('|;|;|')) + f""" {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']} > {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk && mv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""")}) arr.append({"bash_command": f"""echo 'OK' """ if table['sed_command'] == 'nil' else (f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']} > {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk && mv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']} && """.join(table['sed_command'].replace('[LOCAL_PATH]' ,f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/""").split('|;|;|')) + f""" {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']} > {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk && mv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}_bk {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""")})
return arr return arr
...@@ -92,7 +92,7 @@ def sql_clean_syntax(ti): ...@@ -92,7 +92,7 @@ def sql_clean_syntax(ti):
iris = ti.xcom_pull(task_ids=['ds_list_extractor']) iris = ti.xcom_pull(task_ids=['ds_list_extractor'])
arr = [] arr = []
for table in json.loads(iris[0][0][0]): for table in json.loads(iris[0][0][0]):
with open(f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""") as csvFile: with open(f"""{Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/{table['file_id']}""", encoding = "ISO-8859-1") as csvFile:
reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""") reader = csv.reader(csvFile, delimiter=f"""{table['delimiter']}""")
field_names_list = reader.__next__() field_names_list = reader.__next__()
arr.append({"sql": f"""select 'OK' """ if table['sql_command'] == 'nil' else f"""{";".join(table['sql_command'].split('|;|;|')).replace('T24_SOURCE', POSTGRES_SCHEMA)}"""}) arr.append({"sql": f"""select 'OK' """ if table['sql_command'] == 'nil' else f"""{";".join(table['sql_command'].split('|;|;|')).replace('T24_SOURCE', POSTGRES_SCHEMA)}"""})
...@@ -114,37 +114,27 @@ with DAG("APJ_1_t24_interface", ...@@ -114,37 +114,27 @@ with DAG("APJ_1_t24_interface",
} }
) )
sftp_xx = BashOperator( sftp_neraca = BashOperator(
task_id="sftp_xx", task_id="sftp_neraca",
bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:/REPORT.BP/NOM/TREASURY/*{yesterday_nodash}* {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/""", bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r -P 2222 {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:bnk.interface/REPORT.BP/NERACA/{yesterday_nodash}/ID0010001/* {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/""",
)
sftp_nominatif = BashOperator(
task_id="sftp_nominatif",
bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r -P 2222 {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:bnk.interface/NOMINATIF*/{yesterday_nodash}/ID0010001* {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/""",
) )
sftp_ppap = BashOperator( sftp_ppap = BashOperator(
task_id="sftp_ppap", task_id="sftp_ppap",
bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:/PPAP.NOMINATIF/*{yesterday_nodash}*COB.csv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/""", bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r -P 2222 {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:bnk.interface/PPAP.NOMINATIF/{yesterday_nodash}.PPAP* {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/""",
) )
# sftp_neraca = BashOperator(
# task_id="sftp_xx",
# bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:/REPORT.BP/NERACA/*{yesterday_nodash}* {Variable.get("LOCAL_PATH")}t24_neraca/{yesterday_nodash}/""",
# )
# sftp_neraca_tel = BashOperator(
# task_id="sftp_xx",
# bash_command=f"""sshpass -p {Variable.get("SFTP_T24_PASSWORD")} sftp -o StrictHostKeyChecking=no -r {Variable.get("SFTP_T24_USER")}@{Variable.get("SFTP_T24_HOST")}:/REPORT.BP/NERACA/*{yesterday_nodash}* {Variable.get("LOCAL_PATH")}t24_neraca/{yesterday_nodash}/""",
# )
ds_list_extractor = PythonOperator( ds_list_extractor = PythonOperator(
task_id='ds_list_extractor', task_id='ds_list_extractor',
python_callable=ds_list_extractor, python_callable=ds_list_extractor,
do_xcom_push=True do_xcom_push=True
) )
# ft_reve = BashOperator(
# task_id="ft_reve",
# bash_command=f"""head -n -2 {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/FT.REVERSE.csv > {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/FT.REVERSE.2.csv && mv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/FT.REVERSE.2.csv {Variable.get("LOCAL_PATH")}t24/{yesterday_nodash}/FT.REVERSE.csv""",
# )
csv_clean_syntax = PythonOperator( csv_clean_syntax = PythonOperator(
task_id='csv_clean_syntax', task_id='csv_clean_syntax',
python_callable=csv_clean_syntax python_callable=csv_clean_syntax
...@@ -158,17 +148,6 @@ with DAG("APJ_1_t24_interface", ...@@ -158,17 +148,6 @@ with DAG("APJ_1_t24_interface",
) )
# pg_drop_schema = PostgresOperator(
# sql= f"""DROP TABLE IF EXISTS {POSTGRES_SCHEMA} CASCADE;""",
# task_id="pg_drop_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
#
# pg_create_schema = PostgresOperator(
# sql= f"""create schema IF NOT EXISTS {POSTGRES_SCHEMA};""",
# task_id="pg_create_schema",
# postgres_conn_id=POSTGRES_CONN_ID,
# )
stop_task = ShortCircuitOperator( stop_task = ShortCircuitOperator(
task_id="stop_task", task_id="stop_task",
...@@ -287,4 +266,4 @@ with DAG("APJ_1_t24_interface", ...@@ -287,4 +266,4 @@ with DAG("APJ_1_t24_interface",
begin >> sftp_xx >> sftp_ppap >> ds_list_extractor >> csv_clean_syntax >> clean_csv >> pg_ddl_syntax >> pg_create_table >> ds_push_syntax >> sql_clean_syntax >> ds_csv_to_table >> ds_clean_data >> stop_task >> ds_create_table_history_nominatif >> ds_to_history >> set_access_schemma >> set_access_all_table >> pentaho >> zip_today >> delete_before >> history_finish begin >> sftp_neraca >> sftp_nominatif >> sftp_ppap >> ds_list_extractor >> csv_clean_syntax >> clean_csv >> pg_ddl_syntax >> pg_create_table >> ds_push_syntax >> sql_clean_syntax >> ds_csv_to_table >> ds_clean_data >> stop_task >> ds_create_table_history_nominatif >> ds_to_history >> set_access_schemma >> set_access_all_table >> pentaho >> zip_today >> delete_before >> history_finish
\ No newline at end of file