_id: string (length 98-184)
text: string (length 91-10.9k)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L458-L467
def clear_xcom_data(self, session=None): """ """ session.query(XCom).filter( XCom.dag_id == self.dag_id, XCom.task_id == self.task_id, XCom.execution_date == self.execution_date ).delete() session.commit()
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L142-L150
def get_dag_code(dag_id): """""" try: return get_code(dag_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcs_hook.py#L382-L445
def create_bucket(self, bucket_name, resource=None, storage_class='MULTI_REGIONAL', location='US', project_id=None, labels=None ): """ """ self.log.info('Creating Bucket: %s; Location: %s; Storage Class: %s', bucket_name, location, storage_class) client = self.get_conn() bucket = client.bucket(bucket_name=bucket_name) bucket_resource = resource or {} for item in bucket_resource: if item != "name": bucket._patch_property(name=item, value=resource[item]) bucket.storage_class = storage_class bucket.labels = labels or {} bucket.create(project=project_id, location=location) return bucket.id
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L50-L59
def _get_api_key(self): """ """ conn = self.get_connection(self.http_conn_id) api_key = conn.password if not api_key: raise AirflowException('Opsgenie API Key is required for this hook, ' 'please check your conn_id configuration.') return api_key
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L766-L894
def create_dag_run(self, dag, session=None): """ """ if dag.schedule_interval and conf.getboolean('scheduler', 'USE_JOB_SCHEDULE'): active_runs = DagRun.find( dag_id=dag.dag_id, state=State.RUNNING, external_trigger=False, session=session ) # return if already reached maximum active runs and no timeout setting if len(active_runs) >= dag.max_active_runs and not dag.dagrun_timeout: return timedout_runs = 0 for dr in active_runs: if ( dr.start_date and dag.dagrun_timeout and dr.start_date < timezone.utcnow() - dag.dagrun_timeout): dr.state = State.FAILED dr.end_date = timezone.utcnow() dag.handle_callback(dr, success=False, reason='dagrun_timeout', session=session) timedout_runs += 1 session.commit() if len(active_runs) - timedout_runs >= dag.max_active_runs: return # this query should be replaced by find dagrun qry = ( session.query(func.max(DagRun.execution_date)) .filter_by(dag_id=dag.dag_id) .filter(or_( DagRun.external_trigger == False, # noqa: E712 # add % as a wildcard for the like query DagRun.run_id.like(DagRun.ID_PREFIX + '%') )) ) last_scheduled_run = qry.scalar() # don't schedule @once again if dag.schedule_interval == '@once' and last_scheduled_run: return None # don't do scheduler catchup for dag's that don't have dag.catchup = True if not (dag.catchup or dag.schedule_interval == '@once'): # The logic is that we move start_date up until # one period before, so that timezone.utcnow() is AFTER # the period end, and the job can be created... now = timezone.utcnow() next_start = dag.following_schedule(now) last_start = dag.previous_schedule(now) if next_start <= now: new_start = last_start else: new_start = dag.previous_schedule(last_start) if dag.start_date: if new_start >= dag.start_date: dag.start_date = new_start else: dag.start_date = new_start next_run_date = None if not last_scheduled_run: # First run task_start_dates = [t.start_date for t in dag.tasks] if task_start_dates: next_run_date = dag.normalize_schedule(min(task_start_dates)) self.log.debug( "Next run date based on tasks %s", next_run_date ) else: next_run_date = dag.following_schedule(last_scheduled_run) # make sure backfills are also considered last_run = dag.get_last_dagrun(session=session) if last_run and next_run_date: while next_run_date <= last_run.execution_date: next_run_date = dag.following_schedule(next_run_date) # don't ever schedule prior to the dag's start_date if dag.start_date: next_run_date = (dag.start_date if not next_run_date else max(next_run_date, dag.start_date)) if next_run_date == dag.start_date: next_run_date = dag.normalize_schedule(dag.start_date) self.log.debug( "Dag start date: %s. Next run date: %s", dag.start_date, next_run_date ) # don't ever schedule in the future or if next_run_date is None if not next_run_date or next_run_date > timezone.utcnow(): return # this structure is necessary to avoid a TypeError from concatenating # NoneType if dag.schedule_interval == '@once': period_end = next_run_date elif next_run_date: period_end = dag.following_schedule(next_run_date) # Don't schedule a dag beyond its end_date (as specified by the dag param) if next_run_date and dag.end_date and next_run_date > dag.end_date: return # Don't schedule a dag beyond its end_date (as specified by the task params) # Get the min task end date, which may come from the dag.default_args min_task_end_date = [] task_end_dates = [t.end_date for t in dag.tasks if t.end_date] if task_end_dates: min_task_end_date = min(task_end_dates) if next_run_date and min_task_end_date and next_run_date > min_task_end_date: return if next_run_date and period_end and period_end <= timezone.utcnow(): next_run = dag.create_dagrun( run_id=DagRun.ID_PREFIX + next_run_date.isoformat(), execution_date=next_run_date, start_date=timezone.utcnow(), state=State.RUNNING, external_trigger=False ) return next_run
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L223-L298
def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None): """ """ queued_tis = self.executor.queued_tasks # also consider running as the state might not have changed in the db yet running_tis = self.executor.running resettable_states = [State.SCHEDULED, State.QUEUED] TI = models.TaskInstance DR = models.DagRun if filter_by_dag_run is None: resettable_tis = ( session .query(TI) .join( DR, and_( TI.dag_id == DR.dag_id, TI.execution_date == DR.execution_date)) .filter( DR.state == State.RUNNING, DR.run_id.notlike(BackfillJob.ID_PREFIX + '%'), TI.state.in_(resettable_states))).all() else: resettable_tis = filter_by_dag_run.get_task_instances(state=resettable_states, session=session) tis_to_reset = [] # Can't use an update here since it doesn't support joins for ti in resettable_tis: if ti.key not in queued_tis and ti.key not in running_tis: tis_to_reset.append(ti) if len(tis_to_reset) == 0: return [] def query(result, items): filter_for_tis = ([and_(TI.dag_id == ti.dag_id, TI.task_id == ti.task_id, TI.execution_date == ti.execution_date) for ti in items]) reset_tis = ( session .query(TI) .filter(or_(*filter_for_tis), TI.state.in_(resettable_states)) .with_for_update() .all()) for ti in reset_tis: ti.state = State.NONE session.merge(ti) return result + reset_tis reset_tis = helpers.reduce_in_chunks(query, tis_to_reset, [], self.max_tis_per_query) task_instance_str = '\n\t'.join( [repr(x) for x in reset_tis]) session.commit() self.log.info( "Reset the following %s TaskInstances:\n\t%s", len(reset_tis), task_instance_str ) return reset_tis
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L1442-L1484
def _process_executor_events(self, simple_dag_bag, session=None): """ """ # TODO: this shares quite a lot of code with _manage_executor_state TI = models.TaskInstance for key, state in list(self.executor.get_event_buffer(simple_dag_bag.dag_ids) .items()): dag_id, task_id, execution_date, try_number = key self.log.info( "Executor reports execution of %s.%s execution_date=%s " "exited with status %s for try_number %s", dag_id, task_id, execution_date, state, try_number ) if state == State.FAILED or state == State.SUCCESS: qry = session.query(TI).filter(TI.dag_id == dag_id, TI.task_id == task_id, TI.execution_date == execution_date) ti = qry.first() if not ti: self.log.warning("TaskInstance %s went missing from the database", ti) continue # TODO: should we fail RUNNING as well, as we do in Backfills? if ti.try_number == try_number and ti.state == State.QUEUED: msg = ("Executor reports task instance {} finished ({}) " "although the task says its {}. Was the task " "killed externally?".format(ti, state, ti.state)) self.log.error(msg) try: simple_dag = simple_dag_bag.get_dag(dag_id) dagbag = models.DagBag(simple_dag.full_filepath) dag = dagbag.get_dag(dag_id) ti.task = dag.get_task(task_id) ti.handle_failure(msg) except Exception: self.log.error("Cannot load the dag bag to handle failure for %s" ". Setting task to FAILED without callbacks or " "retries. Do you have enough resources?", ti) ti.state = State.FAILED session.merge(ti) session.commit()
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L97-L127
def has_dag_access(**dag_kwargs): """ """ def decorator(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): has_access = self.appbuilder.sm.has_access dag_id = request.args.get('dag_id') # if it is false, we need to check whether user has write access on the dag can_dag_edit = dag_kwargs.get('can_dag_edit', False) # 1. check whether the user has can_dag_edit permissions on all_dags # 2. if 1 false, check whether the user # has can_dag_edit permissions on the dag # 3. if 2 false, check whether it is can_dag_read view, # and whether user has the permissions if ( has_access('can_dag_edit', 'all_dags') or has_access('can_dag_edit', dag_id) or (not can_dag_edit and (has_access('can_dag_read', 'all_dags') or has_access('can_dag_read', dag_id)))): return f(self, *args, **kwargs) else: flash("Access is Denied", "danger") return redirect(url_for(self.appbuilder.sm.auth_view. __class__.__name__ + ".login")) return wrapper return decorator
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_bigtable_hook.py#L90-L168
def create_instance(self, instance_id, main_cluster_id, main_cluster_zone, project_id=None, replica_cluster_id=None, replica_cluster_zone=None, instance_display_name=None, instance_type=enums.Instance.Type.TYPE_UNSPECIFIED, instance_labels=None, cluster_nodes=None, cluster_storage_type=enums.StorageType.STORAGE_TYPE_UNSPECIFIED, timeout=None): """ """ cluster_storage_type = enums.StorageType(cluster_storage_type) instance_type = enums.Instance.Type(instance_type) instance = Instance( instance_id, self._get_client(project_id=project_id), instance_display_name, instance_type, instance_labels, ) clusters = [ instance.cluster( main_cluster_id, main_cluster_zone, cluster_nodes, cluster_storage_type ) ] if replica_cluster_id and replica_cluster_zone: clusters.append(instance.cluster( replica_cluster_id, replica_cluster_zone, cluster_nodes, cluster_storage_type )) operation = instance.create( clusters=clusters ) operation.result(timeout) return instance
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L123-L133
def wasb_log_exists(self, remote_log_location): """ """ try: return self.hook.check_for_blob(self.wasb_container, remote_log_location) except Exception: pass return False
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L488-L501
def _main(): """ """ # Parse arguments usage = "usage: nvd3.py [options]" parser = OptionParser(usage=usage, version=("python-nvd3 - Charts generator with " "nvd3.js and d3.js")) parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=True, help="don't print messages to stdout") (options, args) = parser.parse_args()
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_text_to_speech_hook.py#L53-L80
def synthesize_speech(self, input_data, voice, audio_config, retry=None, timeout=None): """ """ client = self.get_conn() self.log.info("Synthesizing input: %s" % input_data) return client.synthesize_speech( input_=input_data, voice=voice, audio_config=audio_config, retry=retry, timeout=timeout )
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L189-L220
def insert_instance_template(self, body, request_id=None, project_id=None): """ """ response = self.get_conn().instanceTemplates().insert( project=project_id, body=body, requestId=request_id ).execute(num_retries=self.num_retries) try: operation_name = response["name"] except KeyError: raise AirflowException( "Wrong response '{}' returned - it should contain " "'name' field".format(response)) self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L68-L95
def close(self): """ """ # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple # times when `logging.shutdown` is called. if self.closed: return super().close() if not self.upload_on_close: return local_loc = os.path.join(self.local_base, self.log_relative_path) remote_loc = os.path.join(self.remote_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions with open(local_loc, 'r') as logfile: log = logfile.read() self.wasb_write(log, remote_loc, append=True) if self.delete_local_copy: shutil.rmtree(os.path.dirname(local_loc)) # Mark closed so we don't double write if close is called twice self.closed = True
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L139-L189
def heartbeat(self): """ """ try: with create_session() as session: job = session.query(BaseJob).filter_by(id=self.id).one() make_transient(job) session.commit() if job.state == State.SHUTDOWN: self.kill() is_unit_test = conf.getboolean('core', 'unit_test_mode') if not is_unit_test: # Figure out how long to sleep for sleep_for = 0 if job.latest_heartbeat: seconds_remaining = self.heartrate - \ (timezone.utcnow() - job.latest_heartbeat)\ .total_seconds() sleep_for = max(0, seconds_remaining) sleep(sleep_for) # Update last heartbeat time with create_session() as session: job = session.query(BaseJob).filter(BaseJob.id == self.id).first() job.latest_heartbeat = timezone.utcnow() session.merge(job) session.commit() self.heartbeat_callback(session=session) self.log.debug('[heartbeat]') except OperationalError as e: self.log.error("Scheduler heartbeat got an exception: %s", str(e))
https://github.com/soimort/you-get/blob/b746ac01c9f39de94cac2d56f665285b0523b974/src/you_get/extractors/wanmen.py#L37-L51
def wanmen_download_by_course(json_api_content, output_dir='.', merge=True, info_only=False, **kwargs): """""" for tIndex in range(len(json_api_content[0]['Topics'])): for pIndex in range(len(json_api_content[0]['Topics'][tIndex]['Parts'])): wanmen_download_by_course_topic_part(json_api_content, tIndex, pIndex, output_dir=output_dir, merge=merge, info_only=info_only, **kwargs)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_fileshare_hook.py#L40-L45
def get_conn(self): """""" conn = self.get_connection(self.conn_id) service_options = conn.extra_dejson return FileService(account_name=conn.login, account_key=conn.password, **service_options)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/databricks_operator.py#L61-L92
def _handle_databricks_operator_execution(operator, hook, log, context): """ """ if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_ID_KEY, value=operator.run_id) log.info('Run submitted with run_id: %s', operator.run_id) run_page_url = hook.get_run_page_url(operator.run_id) if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_PAGE_URL_KEY, value=run_page_url) log.info('View run status, Spark UI, and logs at %s', run_page_url) while True: run_state = hook.get_run_state(operator.run_id) if run_state.is_terminal: if run_state.is_successful: log.info('%s completed successfully.', operator.task_id) log.info('View run status, Spark UI, and logs at %s', run_page_url) return else: error_message = '{t} failed with terminal state: {s}'.format( t=operator.task_id, s=run_state) raise AirflowException(error_message) else: log.info('%s in run state: %s', operator.task_id, run_state) log.info('View run status, Spark UI, and logs at %s', run_page_url) log.info('Sleeping for %s seconds.', operator.polling_period_seconds) time.sleep(operator.polling_period_seconds)
https://github.com/soimort/you-get/blob/b746ac01c9f39de94cac2d56f665285b0523b974/src/you_get/extractors/showroom.py#L11-L24
def showroom_get_roomid_by_room_url_key(room_url_key): """""" fake_headers_mobile = { 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Charset': 'UTF-8,*;q=0.5', 'Accept-Encoding': 'gzip,deflate,sdch', 'Accept-Language': 'en-US,en;q=0.8', 'User-Agent': 'Mozilla/5.0 (Linux; Android 4.4.2; Nexus 4 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Mobile Safari/537.36' } webpage_url = 'https://www.showroom-live.com/' + room_url_key html = get_content(webpage_url, headers = fake_headers_mobile) roomid = match1(html, r'room\?room_id\=(\d+)') assert roomid return roomid
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/__init__.py#L21-L27
def _integrate_plugins(): """""" import sys from airflow.plugins_manager import operators_modules for operators_module in operators_modules: sys.modules[operators_module.__name__] = operators_module globals()[operators_module._name] = operators_module
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L2445-L2458
def _set_unfinished_dag_runs_to_failed(self, dag_runs, session=None): """ """ for dag_run in dag_runs: dag_run.update_state() if dag_run.state not in State.finished(): dag_run.set_state(State.FAILED) session.merge(dag_run)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/hive_hooks.py#L529-L554
def check_for_partition(self, schema, table, partition): """ """ with self.metastore as client: partitions = client.get_partitions_by_filter( schema, table, partition, 1) if partitions: return True else: return False
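A minimal usage sketch for the metastore partition check above, assuming an Airflow 1.10 environment with a configured metastore connection; the connection id, table, and partition filter are illustrative.

from airflow.hooks.hive_hooks import HiveMetastoreHook

# Hypothetical connection id and partition filter -- adjust to your setup.
hook = HiveMetastoreHook(metastore_conn_id='metastore_default')
if hook.check_for_partition('default', 'static_babynames_partitioned', "ds='2015-01-01'"):
    print('partition is present')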
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L221-L247
def get_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT): """ """ self.log.info( "Fetching cluster (project_id=%s, zone=%s, cluster_name=%s)", project_id or self.project_id, self.location, name ) return self.get_client().get_cluster(project_id=project_id or self.project_id, zone=self.location, cluster_id=name, retry=retry, timeout=timeout).self_link
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/slack_operator.py#L79-L87
def execute(self, **kwargs): """ """ if not self.api_params: self.construct_api_call_params() slack = SlackHook(token=self.token, slack_conn_id=self.slack_conn_id) slack.call(self.method, self.api_params)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/celery_queue_sensor.py#L49-L62
def _check_task_id(self, context): """ """ ti = context['ti'] celery_result = ti.xcom_pull(task_ids=self.target_task_id) return celery_result.ready()
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/get_dag_run_state.py#L24-L44
def get_dag_run_state(dag_id, execution_date): """""" dagbag = DagBag() # Check DAG exists. if dag_id not in dagbag.dags: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) # Get DAG object and check Task Exists dag = dagbag.get_dag(dag_id) # Get DagRun object and check that it exists dagrun = dag.get_dagrun(execution_date=execution_date) if not dagrun: error_message = ('Dag Run for date {} not found in dag {}' .format(execution_date, dag_id)) raise DagRunNotFound(error_message) return {'state': dagrun.get_state()}
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/databricks_operator.py#L34-L58
def _deep_string_coerce(content, json_path='json'): """ """ c = _deep_string_coerce if isinstance(content, six.string_types): return content elif isinstance(content, six.integer_types + (float,)): # Databricks can tolerate either numeric or string types in the API backend. return str(content) elif isinstance(content, (list, tuple)): return [c(e, '{0}[{1}]'.format(json_path, i)) for i, e in enumerate(content)] elif isinstance(content, dict): return {k: c(v, '{0}[{1}]'.format(json_path, k)) for k, v in list(content.items())} else: param_type = type(content) msg = 'Type {0} used for parameter {1} is not a number or a string' \ .format(param_type, json_path) raise AirflowException(msg)
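A self-contained sketch of the same coercion idea, outside of Airflow; the function name and the use of TypeError instead of AirflowException are illustrative choices, not the operator's actual API.

def coerce_to_strings(content, path='json'):
    # Numbers become strings, containers are walked recursively,
    # anything else is rejected with the offending JSON path in the message.
    if isinstance(content, str):
        return content
    if isinstance(content, (int, float)):
        return str(content)
    if isinstance(content, (list, tuple)):
        return [coerce_to_strings(e, '{0}[{1}]'.format(path, i)) for i, e in enumerate(content)]
    if isinstance(content, dict):
        return {k: coerce_to_strings(v, '{0}[{1}]'.format(path, k)) for k, v in content.items()}
    raise TypeError('Type {0} used for parameter {1} is not a number or a string'.format(type(content), path))

print(coerce_to_strings({'timeout_seconds': 30, 'tags': [1, 2.5, 'x']}))
# {'timeout_seconds': '30', 'tags': ['1', '2.5', 'x']}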
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_sql_hook.py#L343-L368
def _wait_for_operation_to_complete(self, project_id, operation_name): """ """ service = self.get_conn() while True: operation_response = service.operations().get( project=project_id, operation=operation_name, ).execute(num_retries=self.num_retries) if operation_response.get("status") == CloudSqlOperationStatus.DONE: error = operation_response.get("error") if error: # Extracting the errors list as string and trimming square braces error_msg = str(error.get("errors"))[1:-1] raise AirflowException(error_msg) # No meaningful info to return from the response in case of success return time.sleep(TIME_TO_SLEEP_IN_SECONDS)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L1669-L1787
def process_file(self, file_path, zombies, pickle_dags=False, session=None): """ """ self.log.info("Processing file %s for tasks to queue", file_path) # As DAGs are parsed from this file, they will be converted into SimpleDags simple_dags = [] try: dagbag = models.DagBag(file_path, include_examples=False) except Exception: self.log.exception("Failed at reloading the DAG file %s", file_path) Stats.incr('dag_file_refresh_error', 1, 1) return [] if len(dagbag.dags) > 0: self.log.info("DAG(s) %s retrieved from %s", dagbag.dags.keys(), file_path) else: self.log.warning("No viable dags retrieved from %s", file_path) self.update_import_errors(session, dagbag) return [] # Save individual DAGs in the ORM and update DagModel.last_scheduled_time for dag in dagbag.dags.values(): dag.sync_to_db() paused_dag_ids = [dag.dag_id for dag in dagbag.dags.values() if dag.is_paused] # Pickle the DAGs (if necessary) and put them into a SimpleDag for dag_id in dagbag.dags: # Only return DAGs that are not paused if dag_id not in paused_dag_ids: dag = dagbag.get_dag(dag_id) pickle_id = None if pickle_dags: pickle_id = dag.pickle(session).id simple_dags.append(SimpleDag(dag, pickle_id=pickle_id)) if len(self.dag_ids) > 0: dags = [dag for dag in dagbag.dags.values() if dag.dag_id in self.dag_ids and dag.dag_id not in paused_dag_ids] else: dags = [dag for dag in dagbag.dags.values() if not dag.parent_dag and dag.dag_id not in paused_dag_ids] # Not using multiprocessing.Queue() since it's no longer a separate # process and due to some unusual behavior. (empty() incorrectly # returns true?) ti_keys_to_schedule = [] self._process_dags(dagbag, dags, ti_keys_to_schedule) for ti_key in ti_keys_to_schedule: dag = dagbag.dags[ti_key[0]] task = dag.get_task(ti_key[1]) ti = models.TaskInstance(task, ti_key[2]) ti.refresh_from_db(session=session, lock_for_update=True) # We can defer checking the task dependency checks to the worker themselves # since they can be expensive to run in the scheduler. dep_context = DepContext(deps=QUEUE_DEPS, ignore_task_deps=True) # Only schedule tasks that have their dependencies met, e.g. to avoid # a task that recently got its state changed to RUNNING from somewhere # other than the scheduler from getting its state overwritten. # TODO(aoen): It's not great that we have to check all the task instance # dependencies twice; once to get the task scheduled, and again to actually # run the task. We should try to come up with a way to only check them once. if ti.are_dependencies_met( dep_context=dep_context, session=session, verbose=True): # Task starts out in the scheduled state. All tasks in the # scheduled state will be sent to the executor ti.state = State.SCHEDULED # Also save this task instance to the DB. self.log.info("Creating / updating %s in ORM", ti) session.merge(ti) # commit batch session.commit() # Record import errors into the ORM try: self.update_import_errors(session, dagbag) except Exception: self.log.exception("Error logging import errors!") try: dagbag.kill_zombies(zombies) except Exception: self.log.exception("Error killing zombies!") return simple_dags
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcs_hook.py#L54-L96
def copy(self, source_bucket, source_object, destination_bucket=None, destination_object=None): """ """ destination_bucket = destination_bucket or source_bucket destination_object = destination_object or source_object if source_bucket == destination_bucket and \ source_object == destination_object: raise ValueError( 'Either source/destination bucket or source/destination object ' 'must be different, not both the same: bucket=%s, object=%s' % (source_bucket, source_object)) if not source_bucket or not source_object: raise ValueError('source_bucket and source_object cannot be empty.') client = self.get_conn() source_bucket = client.get_bucket(source_bucket) source_object = source_bucket.blob(source_object) destination_bucket = client.get_bucket(destination_bucket) destination_object = source_bucket.copy_blob( blob=source_object, destination_bucket=destination_bucket, new_name=destination_object) self.log.info('Object %s in bucket %s copied to object %s in bucket %s', source_object.name, source_bucket.name, destination_object.name, destination_bucket.name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/ssh_hook.py#L199-L239
def get_tunnel(self, remote_port, remote_host="localhost", local_port=None): """ """ if local_port: local_bind_address = ('localhost', local_port) else: local_bind_address = ('localhost',) if self.password and self.password.strip(): client = SSHTunnelForwarder(self.remote_host, ssh_port=self.port, ssh_username=self.username, ssh_password=self.password, ssh_pkey=self.key_file, ssh_proxy=self.host_proxy, local_bind_address=local_bind_address, remote_bind_address=(remote_host, remote_port), logger=self.log) else: client = SSHTunnelForwarder(self.remote_host, ssh_port=self.port, ssh_username=self.username, ssh_pkey=self.key_file, ssh_proxy=self.host_proxy, local_bind_address=local_bind_address, remote_bind_address=(remote_host, remote_port), host_pkey_directories=[], logger=self.log) return client
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/postgres_hook.py#L91-L95
def bulk_dump(self, table, tmp_file): """ """ self.copy_expert("COPY {table} TO STDOUT".format(table=table), tmp_file)
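A usage sketch for bulk_dump, assuming a configured postgres_default connection; the table name and file path are placeholders. Note the table name is interpolated directly into the COPY statement, so it should come from trusted input.

from airflow.hooks.postgres_hook import PostgresHook

hook = PostgresHook(postgres_conn_id='postgres_default')
# Dumps the whole table to a local file via COPY ... TO STDOUT.
hook.bulk_dump('my_table', '/tmp/my_table.dump')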
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L102-L124
def _build_discord_payload(self): """ """ payload = {} if self.username: payload['username'] = self.username if self.avatar_url: payload['avatar_url'] = self.avatar_url payload['tts'] = self.tts if len(self.message) <= 2000: payload['content'] = self.message else: raise AirflowException('Discord message length must be 2000 or fewer ' 'characters.') return json.dumps(payload)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L96-L108
def get_operation(self, operation_name, project_id=None): """ """ return self.get_client().get_operation(project_id=project_id or self.project_id, zone=self.location, operation_id=operation_name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/spark_submit_hook.py#L215-L297
def _build_spark_submit_command(self, application): """ """ connection_cmd = self._get_spark_binary_path() # The url ot the spark master connection_cmd += ["--master", self._connection['master']] if self._conf: for key in self._conf: connection_cmd += ["--conf", "{}={}".format(key, str(self._conf[key]))] if self._env_vars and (self._is_kubernetes or self._is_yarn): if self._is_yarn: tmpl = "spark.yarn.appMasterEnv.{}={}" else: tmpl = "spark.kubernetes.driverEnv.{}={}" for key in self._env_vars: connection_cmd += [ "--conf", tmpl.format(key, str(self._env_vars[key]))] elif self._env_vars and self._connection['deploy_mode'] != "cluster": self._env = self._env_vars # Do it on Popen of the process elif self._env_vars and self._connection['deploy_mode'] == "cluster": raise AirflowException( "SparkSubmitHook env_vars is not supported in standalone-cluster mode.") if self._is_kubernetes: connection_cmd += ["--conf", "spark.kubernetes.namespace={}".format( self._connection['namespace'])] if self._files: connection_cmd += ["--files", self._files] if self._py_files: connection_cmd += ["--py-files", self._py_files] if self._archives: connection_cmd += ["--archives", self._archives] if self._driver_class_path: connection_cmd += ["--driver-class-path", self._driver_class_path] if self._jars: connection_cmd += ["--jars", self._jars] if self._packages: connection_cmd += ["--packages", self._packages] if self._exclude_packages: connection_cmd += ["--exclude-packages", self._exclude_packages] if self._repositories: connection_cmd += ["--repositories", self._repositories] if self._num_executors: connection_cmd += ["--num-executors", str(self._num_executors)] if self._total_executor_cores: connection_cmd += ["--total-executor-cores", str(self._total_executor_cores)] if self._executor_cores: connection_cmd += ["--executor-cores", str(self._executor_cores)] if self._executor_memory: connection_cmd += ["--executor-memory", self._executor_memory] if self._driver_memory: connection_cmd += ["--driver-memory", self._driver_memory] if self._keytab: connection_cmd += ["--keytab", self._keytab] if self._principal: connection_cmd += ["--principal", self._principal] if self._name: connection_cmd += ["--name", self._name] if self._java_class: connection_cmd += ["--class", self._java_class] if self._verbose: connection_cmd += ["--verbose"] if self._connection['queue']: connection_cmd += ["--queue", self._connection['queue']] if self._connection['deploy_mode']: connection_cmd += ["--deploy-mode", self._connection['deploy_mode']] # The actual script to execute connection_cmd += [application] # Append any application arguments if self._application_args: connection_cmd += self._application_args self.log.info("Spark-Submit cmd: %s", connection_cmd) return connection_cmd
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/lineage/__init__.py#L85-L140
def prepare_lineage(func): """ """ @wraps(func) def wrapper(self, context, *args, **kwargs): self.log.debug("Preparing lineage inlets and outlets") task_ids = set(self._inlets['task_ids']).intersection( self.get_flat_relative_ids(upstream=True) ) if task_ids: inlets = self.xcom_pull(context, task_ids=task_ids, dag_id=self.dag_id, key=PIPELINE_OUTLETS) inlets = [item for sublist in inlets if sublist for item in sublist] inlets = [DataSet.map_type(i['typeName'])(data=i['attributes']) for i in inlets] self.inlets.extend(inlets) if self._inlets['auto']: # dont append twice task_ids = set(self._inlets['task_ids']).symmetric_difference( self.upstream_task_ids ) inlets = self.xcom_pull(context, task_ids=task_ids, dag_id=self.dag_id, key=PIPELINE_OUTLETS) inlets = [item for sublist in inlets if sublist for item in sublist] inlets = [DataSet.map_type(i['typeName'])(data=i['attributes']) for i in inlets] self.inlets.extend(inlets) if len(self._inlets['datasets']) > 0: self.inlets.extend(self._inlets['datasets']) # outlets if len(self._outlets['datasets']) > 0: self.outlets.extend(self._outlets['datasets']) self.log.debug("inlets: %s, outlets: %s", self.inlets, self.outlets) for dataset in chain(self.inlets, self.outlets): dataset.set_context(context) return func(self, context, *args, **kwargs) return wrapper
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/http_hook.py#L85-L131
def run(self, endpoint, data=None, headers=None, extra_options=None): """ """ extra_options = extra_options or {} session = self.get_conn(headers) if self.base_url and not self.base_url.endswith('/') and \ endpoint and not endpoint.startswith('/'): url = self.base_url + '/' + endpoint else: url = (self.base_url or '') + (endpoint or '') req = None if self.method == 'GET': # GET uses params req = requests.Request(self.method, url, params=data, headers=headers) elif self.method == 'HEAD': # HEAD doesn't use params req = requests.Request(self.method, url, headers=headers) else: # Others use data req = requests.Request(self.method, url, data=data, headers=headers) prepped_request = session.prepare_request(req) self.log.info("Sending '%s' to url: %s", self.method, url) return self.run_and_check(session, prepped_request, extra_options)
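A usage sketch for HttpHook.run, assuming an http_default connection whose host is the API base URL; the endpoint, query parameters, and timeout are placeholders.

from airflow.hooks.http_hook import HttpHook

hook = HttpHook(method='GET', http_conn_id='http_default')
# For GET, `data` is sent as query parameters; other methods send it as the request body.
response = hook.run('api/v1/status', data={'verbose': 'true'},
                    extra_options={'timeout': 10})
print(response.status_code)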
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/hive_hooks.py#L858-L918
def to_csv( self, hql, csv_filepath, schema='default', delimiter=',', lineterminator='\r\n', output_header=True, fetch_size=1000, hive_conf=None): """ """ results_iter = self._get_results(hql, schema, fetch_size=fetch_size, hive_conf=hive_conf) header = next(results_iter) message = None i = 0 with open(csv_filepath, 'wb') as f: writer = csv.writer(f, delimiter=delimiter, lineterminator=lineterminator, encoding='utf-8') try: if output_header: self.log.debug('Cursor description is %s', header) writer.writerow([c[0] for c in header]) for i, row in enumerate(results_iter, 1): writer.writerow(row) if i % fetch_size == 0: self.log.info("Written %s rows so far.", i) except ValueError as exception: message = str(exception) if message: # need to clean up the file first os.remove(csv_filepath) raise ValueError(message) self.log.info("Done. Loaded a total of %s rows.", i)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L207-L219
def open_maybe_zipped(f, mode='r'): """ """ _, archive, filename = ZIP_REGEX.search(f).groups() if archive and zipfile.is_zipfile(archive): return zipfile.ZipFile(archive, mode=mode).open(filename) else: return io.open(f, mode=mode)
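A usage sketch, assuming ZIP_REGEX splits the path on a '.zip' component so the second call reads a file stored inside a zipped DAG bundle; both paths are illustrative.

# Plain file on disk.
with open_maybe_zipped('/usr/local/airflow/dags/my_dag.py') as f:
    print(f.read()[:200])

# File packaged inside a zipped DAG bundle (archive path + member path).
with open_maybe_zipped('/usr/local/airflow/dags/bundle.zip/my_dag.py') as f:
    print(f.read()[:200])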
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/setup.py#L102-L128
def git_version(version): """ """ repo = None try: import git repo = git.Repo('.git') except ImportError: logger.warning('gitpython not found: Cannot compute the git version.') return '' except Exception as e: logger.warning('Cannot compute the git version. {}'.format(e)) return '' if repo: sha = repo.head.commit.hexsha if repo.is_dirty(): return '.dev0+{sha}.dirty'.format(sha=sha) # commit is clean return '.release:{version}+{sha}'.format(version=version, sha=sha) else: return 'no_git_version'
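A small self-contained sketch of the same GitPython pattern used above; the function name, repository path, and fallback return values are illustrative.

import logging

def describe_git_state(repo_path='.'):
    # Returns the current commit sha, suffixed with '.dirty' when the working
    # tree has uncommitted changes; empty string if GitPython or the repo is
    # unavailable.
    try:
        import git
        repo = git.Repo(repo_path)
    except ImportError:
        logging.warning('gitpython not found: cannot compute the git version.')
        return ''
    except Exception as e:
        logging.warning('Cannot compute the git version. %s', e)
        return ''
    sha = repo.head.commit.hexsha
    return sha + '.dirty' if repo.is_dirty() else sha

print(describe_git_state())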
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L170-L189
def run_query(self, body): """ """ conn = self.get_conn() resp = (conn .projects() .runQuery(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp['batch']
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_fileshare_hook.py#L174-L191
def load_string(self, string_data, share_name, directory_name, file_name, **kwargs): """ """ self.connection.create_file_from_text(share_name, directory_name, file_name, string_data, **kwargs)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/ftp_hook.py#L264-L272
def rename(self, from_name, to_name): """ """ conn = self.get_conn() return conn.rename(from_name, to_name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L432-L448
def terminate(self, sigkill=False): """ """ if self._process is None: raise AirflowException("Tried to call stop before starting!") # The queue will likely get corrupted, so remove the reference self._result_queue = None self._process.terminate() # Arbitrarily wait 5s for the process to die self._process.join(5) if sigkill and self._process.is_alive(): self.log.warning("Killing PID %s", self._process.pid) os.kill(self._process.pid, signal.SIGKILL)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L931-L944
def clear_nonexistent_import_errors(self, session): """ """ query = session.query(errors.ImportError) if self._file_paths: query = query.filter( ~errors.ImportError.filename.in_(self._file_paths) ) query.delete(synchronize_session='fetch') session.commit()
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L83-L110
def create_topic(self, project, topic, fail_if_exists=False): """ """ service = self.get_conn() full_topic = _format_topic(project, topic) try: service.projects().topics().create( name=full_topic, body={}).execute(num_retries=self.num_retries) except HttpError as e: # Status code 409 indicates that the topic already exists. if str(e.resp['status']) == '409': message = 'Topic already exists: {}'.format(full_topic) self.log.warning(message) if fail_if_exists: raise PubSubException(message) else: raise PubSubException( 'Error creating topic {}'.format(full_topic), e)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L249-L292
def command_as_list( self, mark_success=False, ignore_all_deps=False, ignore_task_deps=False, ignore_depends_on_past=False, ignore_ti_state=False, local=False, pickle_id=None, raw=False, job_id=None, pool=None, cfg_path=None): """ """ dag = self.task.dag should_pass_filepath = not pickle_id and dag if should_pass_filepath and dag.full_filepath != dag.filepath: path = "DAGS_FOLDER/{}".format(dag.filepath) elif should_pass_filepath and dag.full_filepath: path = dag.full_filepath else: path = None return TaskInstance.generate_command( self.dag_id, self.task_id, self.execution_date, mark_success=mark_success, ignore_all_deps=ignore_all_deps, ignore_task_deps=ignore_task_deps, ignore_depends_on_past=ignore_depends_on_past, ignore_ti_state=ignore_ti_state, local=local, pickle_id=pickle_id, file_path=path, raw=raw, job_id=job_id, pool=pool, cfg_path=cfg_path)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/local_executor.py#L73-L92
def execute_work(self, key, command): """ """ if key is None: return self.log.info("%s running %s", self.__class__.__name__, command) try: subprocess.check_call(command, close_fds=True) state = State.SUCCESS except subprocess.CalledProcessError as e: state = State.FAILED self.log.error("Failed to execute task %s.", str(e)) # TODO: Why is this commented out? # raise e self.result_queue.put((key, state))
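A stripped-down, standard-library-only sketch of the same run-and-report pattern; the plain Queue and the 'success'/'failed' strings stand in for the executor's result queue and State constants.

import subprocess
from queue import Queue

result_queue = Queue()

def execute_work(key, command):
    # Run the command and push (key, state) so a supervisor can collect results.
    if key is None:
        return
    try:
        subprocess.check_call(command, close_fds=True)
        state = 'success'
    except subprocess.CalledProcessError as e:
        state = 'failed'
        print('Failed to execute task: %s' % e)
    result_queue.put((key, state))

execute_work(('example_dag', 'example_task'), ['echo', 'hello'])
print(result_queue.get())  # (('example_dag', 'example_task'), 'success')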
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1211-L1273
def run_with_configuration(self, configuration): """ """ jobs = self.service.jobs() job_data = {'configuration': configuration} # Send query and wait for reply. query_reply = jobs \ .insert(projectId=self.project_id, body=job_data) \ .execute(num_retries=self.num_retries) self.running_job_id = query_reply['jobReference']['jobId'] if 'location' in query_reply['jobReference']: location = query_reply['jobReference']['location'] else: location = self.location # Wait for query to finish. keep_polling_job = True while keep_polling_job: try: if location: job = jobs.get( projectId=self.project_id, jobId=self.running_job_id, location=location).execute(num_retries=self.num_retries) else: job = jobs.get( projectId=self.project_id, jobId=self.running_job_id).execute(num_retries=self.num_retries) if job['status']['state'] == 'DONE': keep_polling_job = False # Check if job had errors. if 'errorResult' in job['status']: raise Exception( 'BigQuery job failed. Final error was: {}. The job was: {}'. format(job['status']['errorResult'], job)) else: self.log.info('Waiting for job to complete : %s, %s', self.project_id, self.running_job_id) time.sleep(5) except HttpError as err: if err.resp.status in [500, 503]: self.log.info( '%s: Retryable error, waiting for job to complete: %s', err.resp.status, self.running_job_id) time.sleep(5) else: raise Exception( 'BigQuery job status check failed. Final error was: {}'. format(err.resp.status)) return self.running_job_id
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/qubole_hook.py#L212-L229
def get_extra_links(self, operator, dttm): """ """ conn = BaseHook.get_connection(operator.kwargs['qubole_conn_id']) if conn and conn.host: host = re.sub(r'api$', 'v2/analyze?command_id=', conn.host) else: host = 'https://api.qubole.com/v2/analyze?command_id=' ti = TaskInstance(task=operator, execution_date=dttm) qds_command_id = ti.xcom_pull(task_ids=operator.task_id, key='qbol_cmd_id') url = host + str(qds_command_id) if qds_command_id else '' return url
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/jobs.py#L599-L606
def _exit_gracefully(self, signum, frame): """ """ self.log.info("Exiting gracefully upon receiving signal %s", signum) if self.processor_agent: self.processor_agent.end() sys.exit(os.EX_OK)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mysql_to_gcs.py#L313-L334
def type_map(cls, mysql_type): """ """ d = { FIELD_TYPE.INT24: 'INTEGER', FIELD_TYPE.TINY: 'INTEGER', FIELD_TYPE.BIT: 'INTEGER', FIELD_TYPE.DATETIME: 'TIMESTAMP', FIELD_TYPE.DATE: 'TIMESTAMP', FIELD_TYPE.DECIMAL: 'FLOAT', FIELD_TYPE.NEWDECIMAL: 'FLOAT', FIELD_TYPE.DOUBLE: 'FLOAT', FIELD_TYPE.FLOAT: 'FLOAT', FIELD_TYPE.LONG: 'INTEGER', FIELD_TYPE.LONGLONG: 'INTEGER', FIELD_TYPE.SHORT: 'INTEGER', FIELD_TYPE.TIMESTAMP: 'TIMESTAMP', FIELD_TYPE.YEAR: 'INTEGER', } return d[mysql_type] if mysql_type in d else 'STRING'
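A usage sketch, assuming the surrounding class is MySqlToGoogleCloudStorageOperator and that mysqlclient is installed, since the mapping is keyed on MySQLdb's FIELD_TYPE constants; unmapped types fall back to STRING.

from MySQLdb.constants import FIELD_TYPE
from airflow.contrib.operators.mysql_to_gcs import MySqlToGoogleCloudStorageOperator

print(MySqlToGoogleCloudStorageOperator.type_map(FIELD_TYPE.DATETIME))  # TIMESTAMP
print(MySqlToGoogleCloudStorageOperator.type_map(FIELD_TYPE.VARCHAR))   # STRING (fallback)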
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/security.py#L326-L342
def clean_perms(self): """ """ self.log.debug('Cleaning faulty perms') sesh = self.get_session pvms = ( sesh.query(sqla_models.PermissionView) .filter(or_( sqla_models.PermissionView.permission == None, # NOQA sqla_models.PermissionView.view_menu == None, # NOQA )) ) deleted_count = pvms.delete() sesh.commit() if deleted_count: self.log.info('Deleted %s faulty permissions', deleted_count)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1533-L1598
def create_empty_dataset(self, dataset_id="", project_id="", dataset_reference=None): """ """ if dataset_reference: _validate_value('dataset_reference', dataset_reference, dict) else: dataset_reference = {} if "datasetReference" not in dataset_reference: dataset_reference["datasetReference"] = {} if not dataset_reference["datasetReference"].get("datasetId") and not dataset_id: raise ValueError( "{} not provided datasetId. Impossible to create dataset") dataset_required_params = [(dataset_id, "datasetId", ""), (project_id, "projectId", self.project_id)] for param_tuple in dataset_required_params: param, param_name, param_default = param_tuple if param_name not in dataset_reference['datasetReference']: if param_default and not param: self.log.info( "%s was not specified. Will be used default value %s.", param_name, param_default ) param = param_default dataset_reference['datasetReference'].update( {param_name: param}) elif param: _api_resource_configs_duplication_check( param_name, param, dataset_reference['datasetReference'], 'dataset_reference') dataset_id = dataset_reference.get("datasetReference").get("datasetId") dataset_project_id = dataset_reference.get("datasetReference").get( "projectId") self.log.info('Creating Dataset: %s in project: %s ', dataset_id, dataset_project_id) try: self.service.datasets().insert( projectId=dataset_project_id, body=dataset_reference).execute(num_retries=self.num_retries) self.log.info('Dataset created successfully: In project %s ' 'Dataset %s', dataset_project_id, dataset_id) except HttpError as err: raise AirflowException( 'BigQuery job failed. Error was: {}'.format(err.content) )
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L145-L153
def list_directory(self, path): """ """ conn = self.get_conn() files = conn.listdir(path) return files
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/mark_tasks.py#L284-L303
def set_dag_run_state_to_running(dag, execution_date, commit=False, session=None): """ """ res = [] if not dag or not execution_date: return res # Mark the dag run to running. if commit: _set_dag_run_state(dag.dag_id, execution_date, State.RUNNING, session) # To keep the return type consistent with the other similar functions. return res
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/jenkins_job_trigger_operator.py#L124-L147
def build_job(self, jenkins_server): """ """ # Warning if the parameter is too long, the URL can be longer than # the maximum allowed size if self.parameters and isinstance(self.parameters, six.string_types): import ast self.parameters = ast.literal_eval(self.parameters) if not self.parameters: # We need a None to call the non parametrized jenkins api end point self.parameters = None request = Request(jenkins_server.build_job_url(self.job_name, self.parameters, None)) return jenkins_request_with_headers(jenkins_server, request)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/security.py#L440-L453
def update_admin_perm_view(self): """ """ pvms = self.get_session.query(sqla_models.PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_menu] admin = self.find_role('Admin') admin.permissions = list(set(admin.permissions) | set(pvms)) self.get_session.commit()
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/bin/cli.py#L240-L257
def delete_dag(args): """ """ log = LoggingMixin().log if args.yes or input( "This will drop all existing records related to the specified DAG. " "Proceed? (y/n)").upper() == "Y": try: message = api_client.delete_dag(dag_id=args.dag_id) except IOError as err: log.error(err) raise AirflowException(err) log.info(message) else: print("Bail.")
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/webhdfs_hook.py#L92-L104
def check_for_path(self, hdfs_path): """ """ conn = self.get_conn() status = conn.status(hdfs_path, strict=False) return bool(status)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/security.py#L344-L363
def _merge_perm(self, permission_name, view_menu_name): """ """ permission = self.find_permission(permission_name) view_menu = self.find_view_menu(view_menu_name) pv = None if permission and view_menu: pv = self.get_session.query(self.permissionview_model).filter_by( permission=permission, view_menu=view_menu).first() if not pv and permission_name and view_menu_name: self.add_permission_view_menu(permission_name, view_menu_name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L407-L417
def buildjschart(self): """""" self.jschart = '' # add custom tooltip string in jschart # default condition (if build_custom_tooltip is not called explicitly with date_flag=True) if self.tooltip_condition_string == '': self.tooltip_condition_string = 'var y = String(graph.point.y);\n' # Include data self.series_js = json.dumps(self.series)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/hive_hooks.py#L171-L255
def run_cli(self, hql, schema=None, verbose=True, hive_conf=None): """ """ conn = self.conn schema = schema or conn.schema if schema: hql = "USE {schema};\n{hql}".format(schema=schema, hql=hql) with TemporaryDirectory(prefix='airflow_hiveop_') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir) as f: hql = hql + '\n' f.write(hql.encode('UTF-8')) f.flush() hive_cmd = self._prepare_cli_cmd() env_context = get_context_from_env_var() # Only extend the hive_conf if it is defined. if hive_conf: env_context.update(hive_conf) hive_conf_params = self._prepare_hiveconf(env_context) if self.mapred_queue: hive_conf_params.extend( ['-hiveconf', 'mapreduce.job.queuename={}' .format(self.mapred_queue), '-hiveconf', 'mapred.job.queue.name={}' .format(self.mapred_queue), '-hiveconf', 'tez.job.queue.name={}' .format(self.mapred_queue) ]) if self.mapred_queue_priority: hive_conf_params.extend( ['-hiveconf', 'mapreduce.job.priority={}' .format(self.mapred_queue_priority)]) if self.mapred_job_name: hive_conf_params.extend( ['-hiveconf', 'mapred.job.name={}' .format(self.mapred_job_name)]) hive_cmd.extend(hive_conf_params) hive_cmd.extend(['-f', f.name]) if verbose: self.log.info("%s", " ".join(hive_cmd)) sp = subprocess.Popen( hive_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=tmp_dir, close_fds=True) self.sp = sp stdout = '' while True: line = sp.stdout.readline() if not line: break stdout += line.decode('UTF-8') if verbose: self.log.info(line.decode('UTF-8').strip()) sp.wait() if sp.returncode: raise AirflowException(stdout) return stdout
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/ti_deps/deps/ready_to_reschedule.py#L34-L69
def _get_dep_statuses(self, ti, session, dep_context): """ """ if dep_context.ignore_in_reschedule_period: yield self._passing_status( reason="The context specified that being in a reschedule period was " "permitted.") return if ti.state not in self.RESCHEDULEABLE_STATES: yield self._passing_status( reason="The task instance is not in State_UP_FOR_RESCHEDULE or NONE state.") return task_reschedules = TaskReschedule.find_for_task_instance(task_instance=ti) if not task_reschedules: yield self._passing_status( reason="There is no reschedule request for this task instance.") return now = timezone.utcnow() next_reschedule_date = task_reschedules[-1].reschedule_date if now >= next_reschedule_date: yield self._passing_status( reason="Task instance id ready for reschedule.") return yield self._failing_status( reason="Task is not ready for reschedule yet but will be rescheduled " "automatically. Current date is {0} and task will be rescheduled " "at {1}.".format(now.isoformat(), next_reschedule_date.isoformat()))
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/spark_submit_hook.py#L299-L320
def _build_track_driver_status_command(self): """ """ connection_cmd = self._get_spark_binary_path() # The url ot the spark master connection_cmd += ["--master", self._connection['master']] # The driver id so we can poll for its status if self._driver_id: connection_cmd += ["--status", self._driver_id] else: raise AirflowException( "Invalid status: attempted to poll driver " + "status but no driver id is known. Giving up.") self.log.debug("Poll driver status cmd: %s", connection_cmd) return connection_cmd
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/wasb_hook.py#L50-L64
def check_for_blob(self, container_name, blob_name, **kwargs): """ """ return self.connection.exists(container_name, blob_name, **kwargs)
https://github.com/soimort/you-get/blob/b746ac01c9f39de94cac2d56f665285b0523b974/src/you_get/common.py#L1258-L1273
def print_more_compatible(*args, **kwargs): import builtins as __builtin__ """ """ # nothing happens on py3.3 and later if sys.version_info[:2] >= (3, 3): return __builtin__.print(*args, **kwargs) # in lower pyver (e.g. 3.2.x), remove 'flush' keyword and flush it as requested doFlush = kwargs.pop('flush', False) ret = __builtin__.print(*args, **kwargs) if doFlush: kwargs.get('file', sys.stdout).flush() return ret
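Usage matches the built-in print; on Python versions before 3.3 the flush keyword is emulated by flushing the target stream, on 3.3+ the call simply delegates to builtins.print.

import sys

print_more_compatible('downloading ...', end='', flush=True)
print_more_compatible(' done', file=sys.stderr)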
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/hive_hooks.py#L578-L591
def get_table(self, table_name, db='default'): """ """ if db == 'default' and '.' in table_name: db, table_name = table_name.split('.')[:2] with self.metastore as client: return client.get_table(dbname=db, tbl_name=table_name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L258-L266
def convert_tuple_type(cls, name, value): """ """ names = ['field_' + str(i) for i in range(len(value))] values = [cls.convert_value(name, value) for name, value in zip(names, value)] return cls.generate_data_dict(names, values)
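A minimal standalone sketch of the same idea: name the tuple elements field_0..field_n and zip them into a dict. The identity converter stands in for the class's convert_value.

def tuple_to_dict(value, convert=lambda name, v: v):
    # Positional tuple elements become named fields, optionally converted.
    names = ['field_' + str(i) for i in range(len(value))]
    return {n: convert(n, v) for n, v in zip(names, value)}

print(tuple_to_dict(('alice', 42, True)))
# {'field_0': 'alice', 'field_1': 42, 'field_2': True}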
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcs_hook.py#L223-L253
def is_updated_after(self, bucket_name, object_name, ts): """ """ client = self.get_conn() bucket = storage.Bucket(client=client, name=bucket_name) blob = bucket.get_blob(blob_name=object_name) blob.reload() blob_update_time = blob.updated if blob_update_time is not None: import dateutil.tz if not ts.tzinfo: ts = ts.replace(tzinfo=dateutil.tz.tzutc()) self.log.info("Verify object date: %s > %s", blob_update_time, ts) if blob_update_time > ts: return True return False
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L810-L854
def start_in_async(self): """ """ while True: loop_start_time = time.time() if self._signal_conn.poll(): agent_signal = self._signal_conn.recv() if agent_signal == DagParsingSignal.TERMINATE_MANAGER: self.terminate() break elif agent_signal == DagParsingSignal.END_MANAGER: self.end() sys.exit(os.EX_OK) self._refresh_dag_dir() simple_dags = self.heartbeat() for simple_dag in simple_dags: self._result_queue.put(simple_dag) self._print_stat() all_files_processed = all(self.get_last_finish_time(x) is not None for x in self.file_paths) max_runs_reached = self.max_runs_reached() dag_parsing_stat = DagParsingStat(self._file_paths, self.get_all_pids(), max_runs_reached, all_files_processed, len(simple_dags)) self._stat_queue.put(dag_parsing_stat) if max_runs_reached: self.log.info("Exiting dag parsing loop as all files " "have been processed %s times", self._max_runs) break loop_duration = time.time() - loop_start_time if loop_duration < 1: sleep_length = 1 - loop_duration self.log.debug("Sleeping for %.2f seconds to prevent excessive logging", sleep_length) time.sleep(sleep_length)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L314-L355
def export_table(self, table, export_dir, input_null_string, input_null_non_string, staging_table, clear_staging_table, enclosed_by, escaped_by, input_fields_terminated_by, input_lines_terminated_by, input_optionally_enclosed_by, batch, relaxed_isolation, extra_export_options=None): """ """ cmd = self._export_cmd(table, export_dir, input_null_string, input_null_non_string, staging_table, clear_staging_table, enclosed_by, escaped_by, input_fields_terminated_by, input_lines_terminated_by, input_optionally_enclosed_by, batch, relaxed_isolation, extra_export_options) self.Popen(cmd)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mysql_to_gcs.py#L201-L208
def _configure_csv_file(self, file_handle, schema): """ """ csv_writer = csv.writer(file_handle, encoding='utf-8', delimiter=self.field_delimiter) csv_writer.writerow(schema) return csv_writer
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/salesforce_hook.py#L110-L123
def get_available_fields(self, obj): """ """ self.get_conn() obj_description = self.describe_object(obj) return [field['name'] for field in obj_description['fields']]
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/plugins_manager.py#L101-L118
def is_valid_plugin(plugin_obj, existing_plugins):
    """ """
    if (
        inspect.isclass(plugin_obj) and
        issubclass(plugin_obj, AirflowPlugin) and
        (plugin_obj is not AirflowPlugin)
    ):
        plugin_obj.validate()
        return plugin_obj not in existing_plugins
    return False
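A small sketch of how the check above behaves, assuming a trivial AirflowPlugin subclass; MyPlugin is invented for illustration.

from airflow.plugins_manager import AirflowPlugin, is_valid_plugin

class MyPlugin(AirflowPlugin):
    # 'name' is required; validate() raises if it is missing.
    name = 'my_plugin'

print(is_valid_plugin(MyPlugin, []))          # True: new AirflowPlugin subclass
print(is_valid_plugin(MyPlugin, [MyPlugin]))  # False: already registered
print(is_valid_plugin(object, []))            # False: not an AirflowPlugin subclass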
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/__init__.py#L39-L53
def get_default_executor():
    """"""
    global DEFAULT_EXECUTOR

    if DEFAULT_EXECUTOR is not None:
        return DEFAULT_EXECUTOR

    executor_name = configuration.conf.get('core', 'EXECUTOR')

    DEFAULT_EXECUTOR = _get_executor(executor_name)

    log = LoggingMixin().log
    log.info("Using executor %s", executor_name)

    return DEFAULT_EXECUTOR
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/security.py#L294-L301
def _has_role(self, role_name_or_list):
    """ """
    if not isinstance(role_name_or_list, list):
        role_name_or_list = [role_name_or_list]
    return any(
        [r.name in role_name_or_list for r in self.get_user_roles()])
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L160-L169
def datetime(*args, **kwargs):
    """ """
    if 'tzinfo' not in kwargs:
        kwargs['tzinfo'] = TIMEZONE

    return dt.datetime(*args, **kwargs)
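A quick illustration of the helper above: when tzinfo is omitted, the result is an aware datetime in the configured default timezone.

from airflow.utils import timezone

aware = timezone.datetime(2019, 1, 1)  # tzinfo filled in from the configured TIMEZONE
print(aware.tzinfo)                    # e.g. UTC, unless default_timezone is overridden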
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L70-L82
def delete_pool(name, session=None):
    """"""
    if not (name and name.strip()):
        raise AirflowBadRequest("Pool name shouldn't be empty")

    pool = session.query(Pool).filter_by(pool=name).first()
    if pool is None:
        raise PoolNotFound("Pool '%s' doesn't exist" % name)

    session.delete(pool)
    session.commit()

    return pool
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L45-L66
def create_pool(name, slots, description, session=None):
    """"""
    if not (name and name.strip()):
        raise AirflowBadRequest("Pool name shouldn't be empty")

    try:
        slots = int(slots)
    except ValueError:
        raise AirflowBadRequest("Bad value for `slots`: %s" % slots)

    session.expire_on_commit = False
    pool = session.query(Pool).filter_by(pool=name).first()
    if pool is None:
        pool = Pool(pool=name, slots=slots, description=description)
        session.add(pool)
    else:
        pool.slots = slots
        pool.description = description

    session.commit()

    return pool
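A usage sketch for the two pool helpers above; both are wrapped with @provide_session in the source, so the session argument can be omitted when a configured metadata database is available. The pool name, slot count, and description are placeholders.

from airflow.api.common.experimental.pool import create_pool, delete_pool

# Create (or update) a pool, then remove it again.
pool = create_pool(name='backfill_pool', slots=4, description='Example pool')
print(pool.pool, pool.slots)

deleted = delete_pool(name='backfill_pool')
print(deleted.pool)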
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L197-L208
def try_number(self):
    """ """
    # This is designed so that task logs end up in the right file.
    if self.state == State.RUNNING:
        return self._try_number
    return self._try_number + 1
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L100-L113
def create_cluster_snapshot(self, snapshot_identifier, cluster_identifier):
    """ """
    response = self.get_conn().create_cluster_snapshot(
        SnapshotIdentifier=snapshot_identifier,
        ClusterIdentifier=cluster_identifier,
    )
    return response['Snapshot'] if response['Snapshot'] else None
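A hedged usage sketch for the hook method above; the AWS connection id, snapshot name, and cluster name are placeholders.

from airflow.contrib.hooks.redshift_hook import RedshiftHook

hook = RedshiftHook(aws_conn_id='aws_default')  # hypothetical connection id
snapshot = hook.create_cluster_snapshot(
    snapshot_identifier='nightly-2019-01-01',   # placeholder identifiers
    cluster_identifier='analytics-cluster',
)
if snapshot:
    print(snapshot['SnapshotIdentifier'], snapshot['Status'])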
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L395-L411
def current_state(self, session=None):
    """ """
    TI = TaskInstance
    ti = session.query(TI).filter(
        TI.dag_id == self.dag_id,
        TI.task_id == self.task_id,
        TI.execution_date == self.execution_date,
    ).all()
    if ti:
        state = ti[0].state
    else:
        state = None
    return state
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/gcs_task_handler.py#L123-L130
def gcs_read(self, remote_log_location):
    """ """
    bkt, blob = self.parse_gcs_url(remote_log_location)
    return self.hook.download(bkt, blob).decode('utf-8')
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L57-L70
def _dict_to_proto(py_dict, proto):
    """ """
    dict_json_str = json.dumps(py_dict)
    return json_format.Parse(dict_json_str, proto)
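A small sketch of the same round trip the helper performs, assuming the google-cloud-container client library is installed; the dict contents are placeholders.

import json

from google.cloud.container_v1.types import Cluster
from google.protobuf import json_format

# dict -> JSON string -> proto message, as in _dict_to_proto above.
py_dict = {'name': 'example-cluster', 'initial_node_count': 1}
cluster = json_format.Parse(json.dumps(py_dict), Cluster())
print(cluster.name, cluster.initial_node_count)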
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/cloudant_hook.py#L40-L59
def get_conn(self):
    """ """
    conn = self.get_connection(self.cloudant_conn_id)

    self._validate_connection(conn)

    cloudant_session = cloudant(user=conn.login, passwd=conn.password, account=conn.host)

    return cloudant_session
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L550-L586
def are_dependencies_met(
        self,
        dep_context=None,
        session=None,
        verbose=False):
    """ """
    dep_context = dep_context or DepContext()
    failed = False
    verbose_aware_logger = self.log.info if verbose else self.log.debug
    for dep_status in self.get_failed_dep_statuses(
            dep_context=dep_context,
            session=session):
        failed = True
        verbose_aware_logger(
            "Dependencies not met for %s, dependency '%s' FAILED: %s",
            self, dep_status.dep_name, dep_status.reason
        )

    if failed:
        return False

    verbose_aware_logger("Dependencies all met for %s", self)
    return True
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L170-L219
def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAULT):
    """ """
    if isinstance(cluster, dict):
        cluster_proto = Cluster()
        cluster = self._dict_to_proto(py_dict=cluster, proto=cluster_proto)
    elif not isinstance(cluster, Cluster):
        raise AirflowException(
            "cluster is not instance of Cluster proto or python dict")

    self._append_label(cluster, 'airflow-version', 'v' + version.version)

    self.log.info(
        "Creating (project_id=%s, zone=%s, cluster_name=%s)",
        self.project_id, self.location, cluster.name
    )
    try:
        op = self.get_client().create_cluster(project_id=project_id or self.project_id,
                                              zone=self.location,
                                              cluster=cluster,
                                              retry=retry,
                                              timeout=timeout)
        op = self.wait_for_operation(op)

        return op.target_link
    except AlreadyExists as error:
        self.log.info('Assuming Success: %s', error.message)
        return self.get_cluster(name=cluster.name).self_link
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/imap_hook.py#L309-L316
def get_file(self):
    """ """
    return self.part.get_filename(), self.part.get_payload(decode=True)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/aws_sqs_publish_operator.py#L61-L81
def execute(self, context):
    """ """
    hook = SQSHook(aws_conn_id=self.aws_conn_id)

    result = hook.send_message(queue_url=self.sqs_queue,
                               message_body=self.message_content,
                               delay_seconds=self.delay_seconds,
                               message_attributes=self.message_attributes)

    self.log.info('result is send_message is %s', result)

    return result
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L130-L159
def upload_function_zip(self, location, zip_path, project_id=None):
    """ """
    response = self.get_conn().projects().locations().functions().generateUploadUrl(
        parent=self._full_location(project_id, location)
    ).execute(num_retries=self.num_retries)

    upload_url = response.get('uploadUrl')

    with open(zip_path, 'rb') as fp:
        requests.put(
            url=upload_url,
            data=fp,
            # Those two headers needs to be specified according to:
            # https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl # nopep8
            headers={
                'Content-type': 'application/zip',
                'x-goog-content-length-range': '0,104857600',
            }
        )
    return upload_url
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sagemaker_hook.py#L47-L56
def argmin(arr, f):
    """"""
    m = None
    i = None
    for idx, item in enumerate(arr):
        if item is not None:
            if m is None or f(item) < m:
                m = f(item)
                i = idx
    return i
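A worked example for the helper above: the index of the element with the smallest key among non-None entries is returned, or None if nothing qualifies.

from airflow.contrib.hooks.sagemaker_hook import argmin

print(argmin([10, 3, None, 7], lambda x: x))         # 1 (value 3 is the smallest non-None)
print(argmin([None, None], lambda x: x))             # None (nothing to compare)
print(argmin([('a', 5), ('b', 2)], lambda t: t[1]))  # 1 (smallest second element)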
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/gcp_container_operator.py#L282-L308
def _set_env_from_extras(self, extras):
    """ """
    key_path = self._get_field(extras, 'key_path', False)
    keyfile_json_str = self._get_field(extras, 'keyfile_dict', False)

    if not key_path and not keyfile_json_str:
        self.log.info('Using gcloud with application default credentials.')
    elif key_path:
        os.environ[G_APP_CRED] = key_path
    else:
        # Write service account JSON to secure file for gcloud to reference
        service_key = tempfile.NamedTemporaryFile(delete=False)
        service_key.write(keyfile_json_str)
        os.environ[G_APP_CRED] = service_key.name
        # Return file object to have a pointer to close after use,
        # thus deleting from file system.
        return service_key
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/bin/cli.py#L88-L103
def sigquit_handler(sig, frame):
    """ """
    print("Dumping stack traces for all threads in PID {}".format(os.getpid()))
    id_to_name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    for thread_id, stack in sys._current_frames().items():
        code.append("\n# Thread: {}({})"
                    .format(id_to_name.get(thread_id, ""), thread_id))
        for filename, line_number, name, line in traceback.extract_stack(stack):
            code.append('File: "{}", line {}, in {}'
                        .format(filename, line_number, name))
            if line:
                code.append("  {}".format(line.strip()))
    print("\n".join(code))
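In the CLI this handler is registered for SIGQUIT; a minimal sketch of that wiring (on platforms where SIGQUIT exists):

import signal

from airflow.bin.cli import sigquit_handler

# Sending SIGQUIT (e.g. Ctrl-\) now dumps a stack trace for every thread
# without terminating the process.
signal.signal(signal.SIGQUIT, sigquit_handler)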
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/cli.py#L84-L116
def _build_metrics(func_name, namespace):
    """ """
    metrics = {'sub_command': func_name, 'start_datetime': datetime.utcnow(),
               'full_command': '{}'.format(list(sys.argv)), 'user': getpass.getuser()}

    assert isinstance(namespace, Namespace)
    tmp_dic = vars(namespace)
    metrics['dag_id'] = tmp_dic.get('dag_id')
    metrics['task_id'] = tmp_dic.get('task_id')
    metrics['execution_date'] = tmp_dic.get('execution_date')
    metrics['host_name'] = socket.gethostname()

    extra = json.dumps(dict((k, metrics[k]) for k in ('host_name', 'full_command')))
    log = Log(
        event='cli_{}'.format(func_name),
        task_instance=None,
        owner=metrics['user'],
        extra=extra,
        task_id=metrics.get('task_id'),
        dag_id=metrics.get('dag_id'),
        execution_date=metrics.get('execution_date'))
    metrics['log'] = log
    return metrics
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/__init__.py#L31-L36
def _integrate_plugins():
    """"""
    from airflow.plugins_manager import executors_modules
    for executors_module in executors_modules:
        sys.modules[executors_module.__name__] = executors_module
        globals()[executors_module._name] = executors_module
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_sql_hook.py#L136-L160
def patch_instance(self, body, instance, project_id=None):
    """ """
    response = self.get_conn().instances().patch(
        project=project_id,
        instance=instance,
        body=body
    ).execute(num_retries=self.num_retries)
    operation_name = response["name"]
    self._wait_for_operation_to_complete(project_id=project_id,
                                         operation_name=operation_name)
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/celery_executor.py#L168-L176
def _num_tasks_per_fetch_process(self):
    """ """
    return max(1,
               int(math.ceil(1.0 * len(self.tasks) / self._sync_parallelism)))
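A worked example of the arithmetic above: with 100 buffered tasks and a sync parallelism of 16, each fetch process handles ceil(100 / 16) = 7 results, and the floor of 1 applies when there are no tasks. The chunk_size helper below is a hypothetical standalone mirror of the same expression, not part of the executor.

import math

def chunk_size(num_tasks, sync_parallelism):
    # Mirrors the expression in _num_tasks_per_fetch_process.
    return max(1, int(math.ceil(1.0 * num_tasks / sync_parallelism)))

print(chunk_size(100, 16))  # 7
print(chunk_size(0, 16))    # 1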
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/example_dags/example_trigger_controller_dag.py#L45-L52
def conditionally_trigger(context, dag_run_obj):
    """"""
    c_p = context['params']['condition_param']
    print("Controller DAG : conditionally_trigger = {}".format(c_p))
    if context['params']['condition_param']:
        dag_run_obj.payload = {'message': context['params']['message']}
        pp.pprint(dag_run_obj.payload)
        return dag_run_obj
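In the example DAG this callable is passed to a TriggerDagRunOperator; a hedged sketch of that wiring, with the task id, trigger target, and params as placeholders and a DAG object named `dag` assumed to be in scope.

from airflow.operators.dagrun_operator import TriggerDagRunOperator

trigger = TriggerDagRunOperator(
    task_id='test_trigger_dagrun',               # placeholder task id
    trigger_dag_id='example_trigger_target_dag',  # placeholder target DAG
    python_callable=conditionally_trigger,
    params={'condition_param': True, 'message': 'Hello World'},
    dag=dag,
)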
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_spanner_hook.py#L163-L181
def delete_instance(self, instance_id, project_id=None):
    """ """
    instance = self._get_client(project_id=project_id).instance(instance_id)
    try:
        instance.delete()
        return
    except GoogleAPICallError as e:
        self.log.error('An error occurred: %s. Exiting.', e.message)
        raise e