mirror of
https://github.com/astral-sh/ruff.git
synced 2025-10-01 06:11:43 +00:00

<!-- Thank you for contributing to Ruff! To help us out with reviewing, please consider the following: - Does this pull request include a summary of the change? (See below.) - Does this pull request include a descriptive title? - Does this pull request include references to any relevant issues? --> ## Summary <!-- What's the purpose of the change? What does it do, and why? --> As discussed in https://github.com/astral-sh/ruff/issues/14626#issuecomment-2766146129, we're going to separate suggested changes from required changes. The following symbols have been moved to AIR311 from AIR301. They still work in Airflow 3.0, but they're suggested to be changed as they're expected to be removed in a future version. * arguments * `airflow..DAG | dag` * `sla_miss_callback` * operators * `sla` * name * `airflow.Dataset] | [airflow.datasets.Dataset` → `airflow.sdk.Asset` * `airflow.datasets, rest @ ..` * `DatasetAlias` → `airflow.sdk.AssetAlias` * `DatasetAll` → `airflow.sdk.AssetAll` * `DatasetAny` → `airflow.sdk.AssetAny` * `expand_alias_to_datasets` → `airflow.sdk.expand_alias_to_assets` * `metadata.Metadata` → `airflow.sdk.Metadata` <!--airflow.models.baseoperator--> * `airflow.models.baseoperator.chain` → `airflow.sdk.chain` * `airflow.models.baseoperator.chain_linear` → `airflow.sdk.chain_linear` * `airflow.models.baseoperator.cross_downstream` → `airflow.sdk.cross_downstream` * `airflow.models.baseoperatorlink.BaseOperatorLink` → `airflow.sdk.definitions.baseoperatorlink.BaseOperatorLink` * `airflow.timetables, rest @ ..` * `datasets.DatasetOrTimeSchedule` → * `airflow.timetables.assets.AssetOrTimeSchedule` * `airflow.utils, rest @ ..` <!--airflow.utils.dag_parsing_context--> * `dag_parsing_context.get_parsing_context` → `airflow.sdk.get_parsing_context` ## Test Plan <!-- How was it tested? --> The test fixture has been updated accordingly
92 lines
2.9 KiB
Python
92 lines
2.9 KiB
Python
from __future__ import annotations
|
|
|
|
from datetime import timedelta
|
|
|
|
from airflow import DAG, dag
|
|
from airflow.operators.datetime import BranchDateTimeOperator
|
|
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
|
|
from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler
|
|
from airflow.providers.apache.hdfs.log.hdfs_task_handler import HdfsTaskHandler
|
|
from airflow.providers.elasticsearch.log.es_task_handler import ElasticsearchTaskHandler
|
|
from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager
|
|
from airflow.providers.google.cloud.log.gcs_task_handler import GCSTaskHandler
|
|
from airflow.providers.standard.operators import datetime, trigger_dagrun
|
|
from airflow.providers.standard.sensors import weekday
|
|
from airflow.sensors.weekday import BranchDayOfWeekOperator, DayOfWeekSensor
|
|
from airflow.timetables.simple import NullTimetable
|
|
|
|
DAG(dag_id="class_schedule", schedule="@hourly")
|
|
|
|
DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")
|
|
|
|
DAG(dag_id="class_timetable", timetable=NullTimetable())
|
|
|
|
|
|
DAG(dag_id="class_fail_stop", fail_stop=True)
|
|
|
|
DAG(dag_id="class_default_view", default_view="dag_default_view")
|
|
|
|
DAG(dag_id="class_orientation", orientation="BT")
|
|
|
|
allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
|
|
allow_future_exec_dates_dag.allow_future_exec_dates
|
|
|
|
|
|
@dag(schedule="0 * * * *")
|
|
def decorator_schedule():
|
|
pass
|
|
|
|
|
|
@dag(schedule_interval="0 * * * *")
|
|
def decorator_schedule_interval():
|
|
pass
|
|
|
|
|
|
@dag(timetable=NullTimetable())
|
|
def decorator_timetable():
|
|
pass
|
|
|
|
|
|
@dag()
|
|
def decorator_deprecated_operator_args():
|
|
trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
|
|
task_id="trigger_dagrun_op1", execution_date="2024-12-04"
|
|
)
|
|
trigger_dagrun_op2 = TriggerDagRunOperator(
|
|
task_id="trigger_dagrun_op2", execution_date="2024-12-04"
|
|
)
|
|
|
|
branch_dt_op = datetime.BranchDateTimeOperator(
|
|
task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
|
|
)
|
|
branch_dt_op2 = BranchDateTimeOperator(
|
|
task_id="branch_dt_op2",
|
|
use_task_execution_day=True,
|
|
sla=timedelta(seconds=10),
|
|
)
|
|
|
|
dof_task_sensor = weekday.DayOfWeekSensor(
|
|
task_id="dof_task_sensor", use_task_execution_day=True
|
|
)
|
|
dof_task_sensor2 = DayOfWeekSensor(
|
|
task_id="dof_task_sensor2", use_task_execution_day=True
|
|
)
|
|
|
|
bdow_op = weekday.BranchDayOfWeekOperator(
|
|
task_id="bdow_op", use_task_execution_day=True
|
|
)
|
|
bdow_op2 = BranchDayOfWeekOperator(task_id="bdow_op2", use_task_execution_day=True)
|
|
|
|
trigger_dagrun_op >> trigger_dagrun_op2
|
|
branch_dt_op >> branch_dt_op2
|
|
dof_task_sensor >> dof_task_sensor2
|
|
bdow_op >> bdow_op2
|
|
|
|
|
|
# deprecated filename_template argument in FileTaskHandler
|
|
S3TaskHandler(filename_template="/tmp/test")
|
|
HdfsTaskHandler(filename_template="/tmp/test")
|
|
ElasticsearchTaskHandler(filename_template="/tmp/test")
|
|
GCSTaskHandler(filename_template="/tmp/test")
|
|
|
|
FabAuthManager(None)
|