package include

v0.23.0

Published: Nov 17, 2020 License: Apache-2.0 Imports: 1 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

var Composeyml = strings.TrimSpace(`
version: '2'

networks:
  airflow:
    driver: bridge

volumes:
  postgres_data:
    driver: local
  airflow_logs:
    driver: local

services:
  postgres:
    image: postgres:12.2
    restart: unless-stopped
    networks:
      - airflow
    labels:
      io.astronomer.docker: "true"
      io.astronomer.docker.cli: "true"
    ports:
      - {{ .PostgresPort }}:5432
    volumes:
      - postgres_data:/var/lib/postgresql/data
    environment:
      POSTGRES_USER: {{ .PostgresUser }}
      POSTGRES_PASSWORD: {{ .PostgresPassword }}

  scheduler:
    image: {{ .AirflowImage }}
    command: >
      bash -c "(airflow upgradedb || airflow db upgrade) && airflow scheduler"
    restart: unless-stopped
    networks:
      - airflow
    user: {{ .AirflowUser }}
    labels:
      io.astronomer.docker: "true"
      io.astronomer.docker.cli: "true"
      io.astronomer.docker.component: "airflow-scheduler"
    depends_on:
      - postgres
    environment:
      AIRFLOW__CORE__EXECUTOR: LocalExecutor
      AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql://{{ .PostgresUser }}:{{ .PostgresPassword }}@{{ .PostgresHost }}:5432
      AIRFLOW__CORE__LOAD_EXAMPLES: "False"
      AIRFLOW__CORE__FERNET_KEY: "d6Vefz3G9U_ynXB3cr7y_Ak35tAHkEGAVxuz_B-jzWw="
    volumes:
      - {{ .AirflowHome }}/dags:/usr/local/airflow/dags:ro
      - {{ .AirflowHome }}/plugins:/usr/local/airflow/plugins:{{ .MountLabel }}
      - {{ .AirflowHome }}/include:/usr/local/airflow/include:{{ .MountLabel }}
      - airflow_logs:/usr/local/airflow/logs
    {{ .AirflowEnvFile }}

  webserver:
    image: {{ .AirflowImage }}
    command: >
      bash -c '{ airflow create_user "$$@" || airflow users create "$$@"; } && { airflow sync_perm || airflow sync-perm; } && airflow webserver' -- -r Admin -u admin -e admin@example.com -f admin -l user -p admin
    restart: unless-stopped
    networks:
      - airflow
    user: {{ .AirflowUser }}
    labels:
      io.astronomer.docker: "true"
      io.astronomer.docker.cli: "true"
      io.astronomer.docker.component: "airflow-webserver"
    depends_on:
      - scheduler
      - postgres
    environment:
      AIRFLOW__CORE__EXECUTOR: LocalExecutor
      AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql://{{ .PostgresUser }}:{{ .PostgresPassword }}@{{ .PostgresHost }}:5432
      AIRFLOW__CORE__LOAD_EXAMPLES: "False"
      AIRFLOW__CORE__FERNET_KEY: "d6Vefz3G9U_ynXB3cr7y_Ak35tAHkEGAVxuz_B-jzWw="
      AIRFLOW__WEBSERVER__RBAC: "True"
      AIRFLOW__API__AUTH_BACKEND: "airflow.api.auth.backend.default"
    ports:
      - {{ .AirflowWebserverPort }}:8080
    volumes:
      - {{ .AirflowHome }}/dags:/usr/local/airflow/dags:{{ .MountLabel }}
      - {{ .AirflowHome }}/plugins:/usr/local/airflow/plugins:{{ .MountLabel }}
      - {{ .AirflowHome }}/include:/usr/local/airflow/include:{{ .MountLabel }}
      - airflow_logs:/usr/local/airflow/logs
    {{ .AirflowEnvFile }}
`)

Composeyml is the docker-compose template
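Composeyml is parsed with Go's text/template; each {{ .Field }} placeholder above names a value supplied at render time. Below is a minimal sketch of rendering it with the standard library text/template package. The import path and every field value are illustrative assumptions, not the CLI's actual wiring; the field names themselves are taken from the placeholders in the template.

package main

import (
	"os"
	"text/template"

	"github.com/astronomer/astro-cli/airflow/include" // assumed import path
)

func main() {
	// Keys mirror the placeholders in Composeyml; every value here is illustrative.
	data := map[string]string{
		"PostgresUser":         "postgres",
		"PostgresPassword":     "postgres",
		"PostgresHost":         "postgres",
		"PostgresPort":         "5432",
		"AirflowImage":         "my-project/airflow:latest",
		"AirflowHome":          "/home/user/my-project",
		"AirflowUser":          "astro",
		"AirflowWebserverPort": "8080",
		"AirflowEnvFile":       "",   // e.g. an "env_file: .env" block when a .env file exists
		"MountLabel":           "rw", // or "z" on SELinux hosts
	}

	tmpl := template.Must(template.New("compose").Parse(include.Composeyml))
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}

The rendered YAML defines the postgres, scheduler, and webserver services that make up the local Airflow environment.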

var Dockerfile = strings.TrimSpace(`
FROM quay.io/astronomer/ap-airflow:%s
`)

Dockerfile is the Dockerfile template
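Unlike Composeyml, Dockerfile is a plain format string: the single %s verb takes an ap-airflow image tag. A hedged sketch of filling it in and writing it to disk follows; the tag and output path are illustrative, not a pinned recommendation.

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/astronomer/astro-cli/airflow/include" // assumed import path
)

func main() {
	// "2.0.0-buster-onbuild" is only an example tag; substitute whichever ap-airflow tag you target.
	contents := fmt.Sprintf(include.Dockerfile+"\n", "2.0.0-buster-onbuild")
	if err := os.WriteFile("Dockerfile", []byte(contents), 0o644); err != nil {
		log.Fatal(err)
	}
}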

var Dockerignore = strings.TrimSpace(`
.astro
.git
.env
airflow_settings.yaml
logs/
`)

Dockerignore is the .dockerignore template

var ExamplePlugin = strings.TrimSpace(`
from airflow.plugins_manager import AirflowPlugin

"""
Look for the Astronomer tab in the UI.
"""
airflow_plugins_ml = {
    "name": "Airflow-Plugins",
    "category": "Astronomer",
    "category_icon": "fa-rocket",
    "href": "https://github.com/airflow-plugins/"
}

astro_docs_ml = {
    "name": "Astronomer Docs",
    "category": "Astronomer",
    "category_icon": "fa-rocket",
    "href": "https://www.astronomer.io/docs/"
}

astro_guides_ml = {
    "name": "Airflow Guide",
    "category": "Astronomer",
    "category_icon": "fa-rocket",
    "href": "https://www.astronomer.io/guides/"
}

class AstroLinksPlugin(AirflowPlugin):
    name = 'astronomer_menu_links'
    operators = []
    flask_blueprints = []
    hooks = []
    executors = []
    macros = []
    admin_views = []
    appbuilder_views = []
    appbuilder_menu_items = [airflow_plugins_ml, astro_docs_ml, astro_guides_ml]
`)

ExamplePlugin is the example Airflow plugin created with astro airflow init

var Exampledag = strings.TrimSpace(`
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator
from airflow.version import version
from datetime import datetime, timedelta


def my_custom_function(ts, **kwargs):
    """
    This can be any python code you want and is called from the python operator. The code is not executed until
    the task is run by the airflow scheduler.
    """
    print(f"I am task number {kwargs['task_number']}. This DAG Run execution date is {ts} and the current time is {datetime.now()}")
    print('Here is the full DAG Run context. It is available because provide_context=True')
    print(kwargs)


# Default settings applied to all tasks
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5)
}

# Using a DAG context manager, you don't have to specify the dag property of each task
with DAG('example_dag',
         start_date=datetime(2019, 1, 1),
         max_active_runs=3,
         schedule_interval=timedelta(minutes=30),  # https://airflow.apache.org/docs/stable/scheduler.html#dag-runs
         default_args=default_args,
         # catchup=False # enable if you don't want historical dag runs to run
         ) as dag:

    t0 = DummyOperator(
        task_id='start'
    )

    t1 = DummyOperator(
        task_id='group_bash_tasks'
    )
    t2 = BashOperator(
        task_id='bash_print_date1',
        bash_command='sleep $[ ( $RANDOM % 30 )  + 1 ]s && date')
    t3 = BashOperator(
        task_id='bash_print_date2',
        bash_command='sleep $[ ( $RANDOM % 30 )  + 1 ]s && date')

    # generate tasks with a loop. task_id must be unique
    for task in range(5):
        if version.startswith('2'):
            tn = PythonOperator(
                task_id=f'python_print_date_{task}',
                python_callable=my_custom_function,  # make sure you don't include the () of the function
                op_kwargs={'task_number': task},
            )
        else:
            tn = PythonOperator(
                task_id=f'python_print_date_{task}',
                python_callable=my_custom_function,  # make sure you don't include the () of the function
                op_kwargs={'task_number': task},
                provide_context=True,
            )


        t0 >> tn # indented inside for loop so each task is added downstream of t0

    t0 >> t1
    t1 >> [t2, t3] # lists can be used to specify multiple tasks
`)

Exampledag is the example DAG created with astro airflow init

var Gitignore = strings.TrimSpace(`
.git
.env
airflow_settings.yaml
`)

Gitignore is the .gitignore template

var Settingsyml = strings.TrimSpace(`
# This feature is in Beta.

# Please report any bugs to support@astronomer.io

# NOTE: If you put a dict in conn_extra, wrap it in single quotes.

# You can find more details at https://github.com/astronomer/docs/blob/main/v0.10/cli-airflow-configuration.md

airflow:
  connections:
    - conn_id:
      conn_type:
      conn_host:
      conn_schema:
      conn_login:
      conn_password:
      conn_port:
      conn_extra:
  pools:
    - pool_name:
      pool_slot:
      pool_description:
  variables:
    - variable_name:
      variable_value:`)

Settingsyml is the settings template
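The remaining templates (Dockerignore, Gitignore, ExamplePlugin, Exampledag, Settingsyml) are static strings with no placeholders, so scaffolding a project reduces to writing them to the expected paths. Below is a minimal sketch under assumed file names and the same assumed import path as above; the layout is illustrative, not necessarily what the CLI produces.

package main

import (
	"log"
	"os"
	"path/filepath"

	"github.com/astronomer/astro-cli/airflow/include" // assumed import path
)

func main() {
	// Illustrative mapping of output paths to the static templates in this package.
	files := map[string]string{
		".dockerignore":         include.Dockerignore,
		".gitignore":            include.Gitignore,
		"airflow_settings.yaml": include.Settingsyml,
		filepath.Join("dags", "example-dag.py"):       include.Exampledag,
		filepath.Join("plugins", "example-plugin.py"): include.ExamplePlugin,
	}

	for path, contents := range files {
		if dir := filepath.Dir(path); dir != "." {
			if err := os.MkdirAll(dir, 0o755); err != nil {
				log.Fatal(err)
			}
		}
		// Each template is run through strings.TrimSpace, so restore a trailing newline on disk.
		if err := os.WriteFile(path, []byte(contents+"\n"), 0o644); err != nil {
			log.Fatal(err)
		}
	}
}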

Functions

This section is empty.

Types

This section is empty.
