Skip to content

Instantly share code, notes, and snippets.

@diggzhang
Created April 18, 2018 07:26
Show Gist options
  • Save diggzhang/f433af6a26266baa68b059f8262cb40f to your computer and use it in GitHub Desktop.
[2018-04-18 15:04:17,851] {jobs.py:368} INFO - Started process (PID=5940) to work on /home/master/airflow/dags/test_local_executor.py
[2018-04-18 15:04:17,853] {jobs.py:368} INFO - Started process (PID=5941) to work on /home/master/airflow/dags/daily_report_production_version_bash_op.py
[2018-04-18 15:04:17,862] {jobs.py:1546} INFO - Exited execute loop
[2018-04-18 15:04:17,914] {jobs.py:1560} INFO - Terminating child PID: 5940
[2018-04-18 15:04:17,915] {jobs.py:1560} INFO - Terminating child PID: 5941
[2018-04-18 15:04:17,915] {jobs.py:1564} INFO - Waiting up to 5 seconds for processes to exit...
[2018-04-18 15:04:17,963] {jobs.py:379} ERROR - Got an exception! Propagating...
Traceback (most recent call last):
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 369, in helper
scheduler_job = SchedulerJob(dag_ids=dag_id_white_list, log=log)
File "<string>", line 4, in __init__
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/state.py", line 417, in _initialize_instance
manager.dispatch.init_failure(self, args, kwargs)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/util/langhelpers.py", line 66, in __exit__
compat.reraise(exc_type, exc_value, exc_tb)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/state.py", line 414, in _initialize_instance
return manager.original_init(*mixed[1:], **kwargs)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 551, in __init__
[2018-04-18 15:04:17,962] {jobs.py:379} ERROR - Got an exception! Propagating...
Traceback (most recent call last):
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 369, in helper
scheduler_job = SchedulerJob(dag_ids=dag_id_white_list, log=log)
File "<string>", line 4, in __init__
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/state.py", line 417, in _initialize_instance
manager.dispatch.init_failure(self, args, kwargs)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/util/langhelpers.py", line 66, in __exit__
compat.reraise(exc_type, exc_value, exc_tb)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/state.py", line 414, in _initialize_instance
return manager.original_init(*mixed[1:], **kwargs)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 551, in __init__
s/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 551, in __init__
super(SchedulerJob, self).__init__(*args, **kwargs)
super(SchedulerJob, self).__init__(*args, **kwargs)
File "<string>", line 6, in __init__
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 99, in __init__
File "<string>", line 6, in __init__
self.hostname = socket.getfqdn()
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 99, in __init__
File "/home/master/.pyenv/versions/2.7.10/lib/python2.7/socket.py", line 141, in getfqdn
self.hostname = socket.getfqdn()
hostname, aliases, ipaddrs = gethostbyaddr(name)
File "/home/master/.pyenv/versions/2.7.10/lib/python2.7/socket.py", line 141, in getfqdn
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/bin/cli.py", line 73, in sigint_handler
hostname, aliases, ipaddrs = gethostbyaddr(name)
sys.exit(0)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/bin/cli.py", line 73, in sigint_handler
SystemExit: 0
sys.exit(0)
SystemExit: 0
Traceback (most recent call last):
File "/home/master/.pyenv/versions/airflow_env_py2710/bin/airflow", line 27, in <module>
args.func(args)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/bin/cli.py", line 826, in scheduler
job.run()
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 198, in run
self._execute()
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 1544, in _execute
self._execute_helper(processor_manager)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 1659, in _execute_helper
(State.SCHEDULED,))
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/utils/db.py", line 50, in wrapper
result = func(*args, **kwargs)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 1332, in _execute_task_instances
session=session)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/utils/db.py", line 50, in wrapper
result = func(*args, **kwargs)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/airflow/jobs.py", line 1074, in _find_executable_task_instances
task_instances_to_examine = ti_query.all()
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/query.py", line 2737, in all
return list(self)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/query.py", line 2889, in __iter__
return self._execute_and_instances(context)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/orm/query.py", line 2912, in _execute_and_instances
result = conn.execute(querycontext.statement, self._params)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 948, in execute
return meth(self, multiparams, params)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 269, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1060, in _execute_clauseelement
compiled_sql, distilled_params
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1200, in _execute_context
context)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1413, in _handle_dbapi_exception
exc_info
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/util/compat.py", line 203, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1193, in _execute_context
context)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/sqlalchemy/engine/default.py", line 507, in do_execute
cursor.execute(statement, parameters)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/MySQLdb/cursors.py", line 250, in execute
self.errorhandler(self, exc, value)
File "/home/master/.pyenv/versions/2.7.10/envs/airflow_env_py2710/lib/python2.7/site-packages/MySQLdb/connections.py", line 50, in defaulterrorhandler
raise errorvalue
sqlalchemy.exc.OperationalError: (_mysql_exceptions.OperationalError) (1054, "Unknown column 'task_instance.max_tries' in 'field list'") [SQL: u'SELECT task_instance.try_number AS task_instance_try_number, task_instance.task_id AS task_instance_task_id, task_instance.dag_id AS task_instance_dag_id, task_instance.execution_date AS task_instance_execution_date, task_instance.start_date AS task_instance_start_date, task_instance.end_date AS task_instance_end_date, task_instance.duration AS task_instance_duration, task_instance.state AS task_instance_state, task_instance.max_tries AS task_instance_max_tries, task_instance.hostname AS task_instance_hostname, task_instance.unixname AS task_instance_unixname, task_instance.job_id AS task_instance_job_id, task_instance.pool AS task_instance_pool, task_instance.queue AS task_instance_queue, task_instance.priority_weight AS task_instance_priority_weight, task_instance.operator AS task_instance_operator, task_instance.queued_dttm AS task_instance_queued_dttm, task_instance.pid AS task_instance_pid \nFROM task_instance LEFT OUTER JOIN dag_run ON dag_run.dag_id = task_instance.dag_id AND dag_run.execution_date = task_instance.execution_date LEFT OUTER JOIN dag ON dag.dag_id = task_instance.dag_id \nWHERE task_instance.dag_id IN (%s) AND (dag_run.run_id IS NULL OR dag_run.run_id NOT LIKE %s) AND (dag.dag_id IS NULL OR dag.is_paused = 0) AND task_instance.state IN (%s)'] [parameters: ('daily_reporter_airflow_bash_operator', u'backfill_%', u'scheduled')] (Background on this error at: http://sqlalche.me/e/e3q8)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment