Put step 1 (the CSV download) and step 2 (the CSV upload) into a subdag, and then trigger it via the SubDagOperator with the executor option set to a SequentialExecutor. Because the SubDagOperator runs the whole subdag in-process on whichever worker picks up that task, and the SequentialExecutor then runs the subdag's tasks one after another in that same process, steps 1 and 2 are guaranteed to run on the same worker.
Here is a working DAG file illustrating the concept (with the actual operations stubbed out as DummyOperators), placing the download/upload steps in the context of a larger process:
from datetime import datetime

from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.subdag_operator import SubDagOperator
from airflow.executors.sequential_executor import SequentialExecutor

PARENT_DAG_NAME = 'subdaggy'
CHILD_DAG_NAME = 'subby'


def make_sub_dag(parent_dag_name, child_dag_name, start_date, schedule_interval):
    # The subdag's dag_id must be '<parent_dag_id>.<subdag_task_id>'.
    dag = DAG(
        '%s.%s' % (parent_dag_name, child_dag_name),
        schedule_interval=schedule_interval,
        start_date=start_date
    )

    task_download = DummyOperator(
        task_id='task_download_csv',
        dag=dag
    )

    task_upload = DummyOperator(
        task_id='task_upload_csv',
        dag=dag
    )

    task_download >> task_upload
    return dag


main_dag = DAG(
    PARENT_DAG_NAME,
    schedule_interval=None,
    start_date=datetime(2017, 1, 1)
)

main_task_1 = DummyOperator(
    task_id='main_1',
    dag=main_dag
)

# Running the subdag with a SequentialExecutor keeps the download and
# upload steps together on whichever worker executes this task.
main_task_2 = SubDagOperator(
    task_id=CHILD_DAG_NAME,
    subdag=make_sub_dag(PARENT_DAG_NAME, CHILD_DAG_NAME, main_dag.start_date, main_dag.schedule_interval),
    executor=SequentialExecutor(),
    dag=main_dag
)

main_task_3 = DummyOperator(
    task_id='main_3',
    dag=main_dag
)

main_task_1 >> main_task_2 >> main_task_3
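
When you swap the stubs for real work, the same-worker guarantee means the two tasks can safely pass the file through a path on local disk. Here is a minimal sketch of what the two tasks inside make_sub_dag might look like as PythonOperators; the URLs, the LOCAL_CSV_PATH location, and the requests-based transfer are hypothetical placeholders for your own download/upload logic:

import os
import tempfile

import requests  # assumption: plain HTTP endpoints; substitute your own client

from airflow.operators.python_operator import PythonOperator

# Hypothetical scratch location; local disk is fine because both tasks
# are pinned to the same worker by the SequentialExecutor subdag.
LOCAL_CSV_PATH = os.path.join(tempfile.gettempdir(), 'transfer.csv')


def download_csv():
    # Fetch the CSV and write it to local disk for the next task.
    response = requests.get('https://example.com/source.csv')  # hypothetical URL
    response.raise_for_status()
    with open(LOCAL_CSV_PATH, 'wb') as f:
        f.write(response.content)


def upload_csv():
    # Read the file the download task left behind and push it onward.
    with open(LOCAL_CSV_PATH, 'rb') as f:
        response = requests.put('https://example.com/destination.csv', data=f)  # hypothetical URL
    response.raise_for_status()


task_download = PythonOperator(
    task_id='task_download_csv',
    python_callable=download_csv,
    dag=dag
)

task_upload = PythonOperator(
    task_id='task_upload_csv',
    python_callable=upload_csv,
    dag=dag
)

If the two tasks were left in the main DAG instead, a distributed executor could schedule them on different machines and the temp file from the download would not be visible to the upload, which is exactly what the subdag arrangement avoids.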