
I am trying to write a custom operator and sensor in Apache Airflow. The DAG basically has 3 operators and 1 sensor: the first operator/task calls a Python method and prints a message on the console. After that, the 2nd operator is called, a custom operator placed inside the plugins folder in a file named "custom_operator.py", which inserts data into a MongoDB database. Then the custom sensor is called, which uses MongoHook to monitor the DB and check for the value; it lives in the same custom_operator.py file inside plugins. After this, a simple PythonOperator is called.

I have already tried the suggestions from: Can't import Airflow plugins

``` 
# /home/autotest/airflow/dags/custom_dag1.py


import logging

from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from datetime import datetime, timedelta
from airflow.operators import InsertDb
from airflow.operators import DbSensor



log = logging.getLogger(__name__)

defaultArgs = {
    'owner': 'mohit_saumik',
    'depends_on_past': False,
    'start_date': datetime(2019, 4, 11, 10, 21, 23),
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1)
}

# creating first operator which will print on the console.
def print_operator_one():
    log.info("Operator One is executed.")
    return "Operator One is executed and returned"



# Creating third operator which will print on the console.
def print_operator_third():
    log.info("Operator three is executed")
    return "Operator two is executed and returned"


# Creating DAG
dag = DAG('custom_dag', default_args = defaultArgs, schedule_interval=timedelta(minutes=10))


# Creating task 1
operator_one_task = PythonOperator(task_id="task_1", python_callable="print_operator_one", dag=dag)


# Creating task 2
operator_two_task = InsertDb(my_operator_param="This is custom Operator", task_id="task_2", dag=dag)


# Creating Task 3
sensor_one_task = DbSensor(task_id="task_3", poke_interval=10, dag=dag, collection="demoCollection", query={"key1": "value1"})


# Creating task 4
operator_three_task = PythonOperator(task_id="task_4", python_callable="print_operator_third", dag=dag)


# Creating flow
operator_one_task >> operator_two_task >> sensor_one_task >> operator_three_task


```
    # /home/autotest/airflow/plugins/custom_operator.py



    import logging 

    from airflow.models import BaseOperator
    from airflow.plugins_manager import AirflowPlugin
    from airflow.utils.decorators import apply_defaults
    from airflow.contrib.hooks.mongo_hook import MongoHook
    from airflow.operators.sensors import BaseSensorOperator
    from datetime import datetime


    log = logging.getLogger(__name__)


    class InsertDb(BaseOperator):

        @apply_defaults
        def __init__(self, my_operator_param, *args, **kwargs):
            self.operator_param = my_operator_param
            super(InsertDb, self).__init__(*args, **kwargs)


        def execute(self, context):
            log.info("Inserting into the DB!")

            # The hook takes only a connection id and builds the client itself;
            # insert_one is a method on the hook, not on the raw connection.
            db_hook = MongoHook(conn_id="https://localhost,localhost:27017/mydb")
            insertSuccess = db_hook.insert_one(mongo_collection="demoCollection", doc={"key1": "value1"}, mongo_db="mydb")

            log.info(insertSuccess)




    class DbSensor(BaseSensorOperator):

        @apply_defaults
        def __init__(self, collection, query, mongo_conn_id="mongo_default", *args, **kwargs):
            super(DbSensor, self).__init__(*args, **kwargs)
            # Store the parameters so poke() can use them.
            self.collection = collection
            self.query = query
            self.mongo_conn_id = mongo_conn_id



        def poke(self, context):
            db_hook = MongoHook(conn_id=self.mongo_conn_id)
            # find_one=True returns the matching document, or None if there is no match.
            result = db_hook.find(mongo_collection=self.collection, query=self.query, mongo_db="mydb", find_one=True)

            if result is None:
                log.info("Data not available in DB")
                return False
            else:
                log.info("Data is available in DB")
                return True




    class DbPlugin(AirflowPlugin):
        name = "db_plugin"
        operators = [InsertDb, DbSensor]


I am not able to launch the webserver and am getting this error:

[2019-04-12 12:35:16,046] {models.py:377} ERROR - Failed to import: /home/autotest/airflow/dags/custom_dag1.py
Traceback (most recent call last):
  File "/home/autotest/virtualenv/airflow/lib/python3.6/site-packages/airflow/models.py", line 374, in process_file
    m = imp.load_source(mod_name, filepath)
  File "/home/autotest/virtualenv/airflow/lib/python3.6/imp.py", line 172, in load_source
    module = _load(spec)
  File "<frozen importlib._bootstrap>", line 684, in _load
  File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 678, in exec_module
  File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
  File "/home/autotest/airflow/dags/custom_dag1.py", line 41, in <module>
    operator_one_task = PythonOperator(task_id="task_1",python_callable="print_operator_one", dag=dag)
  File "/home/autotest/virtualenv/airflow/lib/python3.6/site-packages/airflow/utils/decorators.py", line 98, in wrapper
    result = func(*args, **kwargs)
  File "/home/autotest/virtualenv/airflow/lib/python3.6/site-packages/airflow/operators/python_operator.py", line 81, in __init__
    raise AirflowException('`python_callable` param must be callable')
airflow.exceptions.AirflowException: `python_callable` param must be callable

1 Answer


Do it without the quotes: `python_callable=print_operator_third`. This way you are passing a callable instead of a string. The same fix applies to task_1, which passes `"print_operator_one"` as a string.
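
For reference, a minimal sketch of the two corrected PythonOperator calls from the DAG above, passing the function objects defined earlier rather than their names as strings:

```
# Pass the function objects themselves, not strings.
operator_one_task = PythonOperator(
    task_id="task_1",
    python_callable=print_operator_one,
    dag=dag,
)

operator_three_task = PythonOperator(
    task_id="task_4",
    python_callable=print_operator_third,
    dag=dag,
)
```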

bosnjak