Skip to content

Instantly share code, notes, and snippets.

@chenhan1218
Last active March 6, 2020 12:25
Show Gist options
  • Save chenhan1218/2a7bd745324dfb015553495750ca4d86 to your computer and use it in GitHub Desktop.
from datetime import datetime, timedelta
import pendulum
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
# gcloud composer environments run test --project PROJECT --location us-central1 backfill -- -s 2020-02-24 -e 2020-02-26 timezone
default_args = {"owner": "airflow"}
def f1(tzinfo=None, **context):
    """Print the task's execution date (optionally localized) and its context.

    Args:
        tzinfo: optional pendulum timezone — any object exposing a
            ``.convert(datetime)`` method. When given, ``execution_date``
            from the Airflow context is converted to that zone before
            printing. NOTE: the original annotation ``datetime.tzinfo``
            was wrong — with ``from datetime import datetime`` in scope,
            ``datetime.tzinfo`` names an attribute descriptor of the
            ``datetime`` class, not a type, and the stdlib ``tzinfo``
            type has no ``convert()`` method anyway; the annotation was
            therefore dropped.
        **context: the Airflow task context (passed because the operator
            sets ``provide_context=True``); every key/value pair is printed.
    """
    if tzinfo is not None:
        # pendulum's Timezone.convert() shifts an aware datetime into the
        # target zone; a stdlib tzinfo would need astimezone() instead.
        print(("local time:", tzinfo.convert(context["execution_date"])))
    for key in context:
        print((key, context[key]))
dag_id = "timezone"

# Daily DAG starting 2020-02-21 (no catchup) with a single PythonOperator
# that dumps its context via f1. Indentation restored — the pasted source
# had the `with` body and call arguments flattened to column 0.
with DAG(
    dag_id,
    default_args=default_args,
    catchup=False,
    start_date=datetime(2020, 2, 21),
    schedule_interval="@daily",
) as dag:
    # As of airflow 1.10.5, the macros values are based on UTC.
    # For a timezone-aware task, only use execution_date as a reference.
    # Don't use ds or prev_ds.
    # See https://airflow.apache.org/timezone.html#templates
    task = PythonOperator(
        task_id="t1",
        provide_context=True,
        depends_on_past=True,
        python_callable=f1,
        dag=dag,  # redundant inside the `with DAG` block, but harmless
    )
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment