Mirror of https://github.com/astral-sh/ruff.git, synced 2025-08-04 10:48:32 +00:00
[airflow]: extend names moved from core to provider (AIR303) (#15159)
Some checks are pending
CI / Determine changes (push) Waiting to run
CI / cargo fmt (push) Waiting to run
CI / cargo clippy (push) Blocked by required conditions
CI / cargo test (linux) (push) Blocked by required conditions
CI / cargo test (linux, release) (push) Blocked by required conditions
CI / cargo test (windows) (push) Blocked by required conditions
CI / cargo test (wasm) (push) Blocked by required conditions
CI / cargo build (release) (push) Waiting to run
CI / cargo build (msrv) (push) Blocked by required conditions
CI / cargo fuzz build (push) Blocked by required conditions
CI / fuzz parser (push) Blocked by required conditions
CI / test scripts (push) Blocked by required conditions
CI / ecosystem (push) Blocked by required conditions
CI / cargo shear (push) Blocked by required conditions
CI / python package (push) Waiting to run
CI / pre-commit (push) Waiting to run
CI / mkdocs (push) Waiting to run
CI / formatter instabilities and black similarity (push) Blocked by required conditions
CI / test ruff-lsp (push) Blocked by required conditions
CI / benchmarks (push) Blocked by required conditions
parent 79816f965c
commit 2288cc7478
3 changed files with 223 additions and 46 deletions
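
This commit extends rule AIR303 so it also flags names that Airflow 3.0 moves out of core and into the fab, celery, common-sql, cncf-kubernetes, and apache-hive provider packages. As a minimal sketch of what the rule reports (assuming project code that still uses an old core path), the fab-managed basic auth backend is one of the covered names:

# Flagged by AIR303: this core path moves into the `fab` provider in Airflow 3.0.
from airflow.api.auth.backend.basic_auth import auth_current_user

# Suggested replacement per the rule's help text (requires the fab provider package):
# from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import auth_current_user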
@@ -3,17 +3,41 @@ from airflow.api.auth.backend.basic_auth import auth_current_user
from airflow.auth.managers.fab.api.auth.backend import (
    kerberos_auth as backend_kerberos_auth,
)
from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
from airflow.auth.managers.fab.security_manager import override as fab_override
from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
from airflow.executors.celery_executor import app
from airflow.executors.kubernetes_executor_types import (
    ALL_NAMESPACES,
    POD_EXECUTOR_DONE_KEY,
)
from airflow.hooks.dbapi import ConnectorProtocol, DbApiHook
from airflow.hooks.hive_hooks import HIVE_QUEUE_PRIORITIES
from airflow.macros.hive import closest_ds_partition, max_partition
from airflow.www.security import FabAirflowSecurityManagerOverride

# apache-airflow-providers-fab
basic_auth, kerberos_auth
auth_current_user
backend_kerberos_auth
fab_override

FabAuthManager
FabAirflowSecurityManagerOverride
FabAuthManager()
FabAirflowSecurityManagerOverride()

# apache-airflow-providers-celery
DEFAULT_CELERY_CONFIG
app

# apache-airflow-providers-common-sql
ConnectorProtocol()
DbApiHook()

# apache-airflow-providers-cncf-kubernetes
ALL_NAMESPACES
POD_EXECUTOR_DONE_KEY

# apache-airflow-providers-apache-hive
HIVE_QUEUE_PRIORITIES
closest_ds_partition()
max_partition()
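
The fixture lines above exercise the fab and celery names; the provider-side paths the rule suggests for them in this commit (see the help texts in the snapshot further down) translate into imports roughly like the following. This is a sketch, not part of the change itself, and assumes the fab and celery provider packages are installed at the versions named in the messages:

# Replacements suggested for the fab names
from airflow.providers.fab.auth_manager.api.auth.backend import basic_auth, kerberos_auth
from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import auth_current_user
from airflow.providers.fab.auth_manager.security_manager import override as fab_override
from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride

# Replacements suggested for the celery names
from airflow.providers.celery.executors.default_celery import DEFAULT_CELERY_CONFIG
from airflow.providers.celery.executors.celery_executor_utils import app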
@@ -165,7 +165,81 @@ fn moved_to_provider(checker: &mut Checker, expr: &Expr, ranged: impl Ranged) {
                version: "3.3.0"
            },
        )),
        ["airflow", "executors", "celery_executor", "app"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.executors.celery_executor.app",
                new_path: "airflow.providers.celery.executors.celery_executor_utils.app",
                provider: "celery",
                version: "3.3.0"
            },
        )),
        // apache-airflow-providers-common-sql
        ["airflow", "hooks", "dbapi", "ConnectorProtocol"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.hooks.dbapi.ConnectorProtocol",
                new_path: "airflow.providers.common.sql.hooks.sql.ConnectorProtocol",
                provider: "Common SQL",
                version: "1.0.0"
            },
        )),
        ["airflow", "hooks", "dbapi", "DbApiHook"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.hooks.dbapi.DbApiHook",
                new_path: "airflow.providers.common.sql.hooks.sql.DbApiHook",
                provider: "Common SQL",
                version: "1.0.0"
            },
        )),
        // apache-airflow-providers-cncf-kubernetes
        ["airflow", "executors", "kubernetes_executor_types", "ALL_NAMESPACES"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.executors.kubernetes_executor_types.ALL_NAMESPACES",
                new_path: "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.ALL_NAMESPACES",
                provider: "Kubernetes",
                version: "7.4.0"
            },
        )),
        ["airflow", "executors", "kubernetes_executor_types", "POD_EXECUTOR_DONE_KEY"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY",
                new_path: "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY",
                provider: "Kubernetes",
                version: "7.4.0"
            },
        )),
        // apache-airflow-providers-apache-hive
        ["airflow", "hooks", "hive_hooks", "HIVE_QUEUE_PRIORITIES"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES",
                new_path: "airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES",
                provider: "Apache Hive",
                version: "1.0.0"
            },
        )),
        ["airflow", "macros", "hive", "closest_ds_partition"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.macros.hive.closest_ds_partition",
                new_path: "airflow.providers.apache.hive.macros.hive.closest_ds_partition",
                provider: "Apache Hive",
                version: "5.1.0"
            },
        )),
        ["airflow", "macros", "hive", "max_partition"] => Some((
            qualname.to_string(),
            Replacement::ImportPathMoved {
                original_path: "airflow.macros.hive.max_partition",
                new_path: "airflow.providers.apache.hive.macros.hive.max_partition",
                provider: "Apache Hive",
                version: "5.1.0"
            },
        )),
        _ => None,
    });
    if let Some((deprecated, replacement)) = result {
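
Every arm added above follows the same shape: a qualified name, already split into dotted segments, maps to a Replacement::ImportPathMoved record carrying the original core path, the new provider path, the provider name, and the minimum provider version used in the help text. A rough Python analogue of that lookup, with hypothetical names used purely for illustration (this is not the rule's actual API), looks like this:

# Hypothetical sketch of the segments -> replacement lookup implemented by the match above.
MOVED_TO_PROVIDER = {
    ("airflow", "hooks", "dbapi", "DbApiHook"): {
        "new_path": "airflow.providers.common.sql.hooks.sql.DbApiHook",
        "provider": "Common SQL",
        "version": "1.0.0",
    },
    ("airflow", "macros", "hive", "max_partition"): {
        "new_path": "airflow.providers.apache.hive.macros.hive.max_partition",
        "provider": "Apache Hive",
        "version": "5.1.0",
    },
}

def moved_to_provider(qualname: str):
    # Return the replacement record for a moved name, or None if it is not tracked.
    return MOVED_TO_PROVIDER.get(tuple(qualname.split(".")))

print(moved_to_provider("airflow.hooks.dbapi.DbApiHook"))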
@@ -2,84 +2,163 @@
source: crates/ruff_linter/src/rules/airflow/mod.rs
snapshot_kind: text
---
AIR303.py:11:1: AIR303 Import path `airflow.api.auth.backend.basic_auth` is moved into `fab` provider in Airflow 3.0;
AIR303.py:20:1: AIR303 Import path `airflow.api.auth.backend.basic_auth` is moved into `fab` provider in Airflow 3.0;
|
9 | from airflow.www.security import FabAirflowSecurityManagerOverride
10 |
11 | basic_auth, kerberos_auth
19 | # apache-airflow-providers-fab
20 | basic_auth, kerberos_auth
| ^^^^^^^^^^ AIR303
12 | auth_current_user
13 | backend_kerberos_auth
21 | auth_current_user
22 | backend_kerberos_auth
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and import from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.

AIR303.py:11:13: AIR303 Import path `airflow.api.auth.backend.kerberos_auth` is moved into `fab` provider in Airflow 3.0;
AIR303.py:20:13: AIR303 Import path `airflow.api.auth.backend.kerberos_auth` is moved into `fab` provider in Airflow 3.0;
|
9 | from airflow.www.security import FabAirflowSecurityManagerOverride
10 |
11 | basic_auth, kerberos_auth
19 | # apache-airflow-providers-fab
20 | basic_auth, kerberos_auth
| ^^^^^^^^^^^^^ AIR303
12 | auth_current_user
13 | backend_kerberos_auth
21 | auth_current_user
22 | backend_kerberos_auth
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and import from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

AIR303.py:12:1: AIR303 Import path `airflow.api.auth.backend.basic_auth` is moved into `fab` provider in Airflow 3.0;
AIR303.py:21:1: AIR303 Import path `airflow.api.auth.backend.basic_auth` is moved into `fab` provider in Airflow 3.0;
|
11 | basic_auth, kerberos_auth
12 | auth_current_user
19 | # apache-airflow-providers-fab
20 | basic_auth, kerberos_auth
21 | auth_current_user
| ^^^^^^^^^^^^^^^^^ AIR303
13 | backend_kerberos_auth
14 | fab_override
22 | backend_kerberos_auth
23 | fab_override
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and import from `airflow.providers.fab.auth_manager.api.auth.backend.basic_auth` instead.

AIR303.py:13:1: AIR303 Import path `airflow.auth_manager.api.auth.backend.kerberos_auth` is moved into `fab` provider in Airflow 3.0;
AIR303.py:22:1: AIR303 Import path `airflow.auth_manager.api.auth.backend.kerberos_auth` is moved into `fab` provider in Airflow 3.0;
|
11 | basic_auth, kerberos_auth
12 | auth_current_user
13 | backend_kerberos_auth
20 | basic_auth, kerberos_auth
21 | auth_current_user
22 | backend_kerberos_auth
| ^^^^^^^^^^^^^^^^^^^^^ AIR303
14 | fab_override
23 | fab_override
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and import from `airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth` instead.

AIR303.py:14:1: AIR303 Import path `airflow.auth.managers.fab.security_managr.override` is moved into `fab` provider in Airflow 3.0;
AIR303.py:23:1: AIR303 Import path `airflow.auth.managers.fab.security_managr.override` is moved into `fab` provider in Airflow 3.0;
|
12 | auth_current_user
13 | backend_kerberos_auth
14 | fab_override
21 | auth_current_user
22 | backend_kerberos_auth
23 | fab_override
| ^^^^^^^^^^^^ AIR303
15 |
16 | FabAuthManager
24 |
25 | FabAuthManager()
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and import from `airflow.providers.fab.auth_manager.security_manager.override` instead.

AIR303.py:16:1: AIR303 `airflow.auth.managers.fab.fab_auth_manager.FabAuthManager` is moved into `fab` provider in Airflow 3.0;
AIR303.py:25:1: AIR303 `airflow.auth.managers.fab.fab_auth_manager.FabAuthManager` is moved into `fab` provider in Airflow 3.0;
|
14 | fab_override
15 |
16 | FabAuthManager
23 | fab_override
24 |
25 | FabAuthManager()
| ^^^^^^^^^^^^^^ AIR303
17 | FabAirflowSecurityManagerOverride
26 | FabAirflowSecurityManagerOverride()
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.security_manager.FabAuthManager` instead.

AIR303.py:17:1: AIR303 `airflow.www.security.FabAirflowSecurityManagerOverride` is moved into `fab` provider in Airflow 3.0;
AIR303.py:26:1: AIR303 `airflow.www.security.FabAirflowSecurityManagerOverride` is moved into `fab` provider in Airflow 3.0;
|
16 | FabAuthManager
17 | FabAirflowSecurityManagerOverride
25 | FabAuthManager()
26 | FabAirflowSecurityManagerOverride()
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AIR303
18 |
19 | DEFAULT_CELERY_CONFIG
27 |
28 | # apache-airflow-providers-celery
|
= help: Install `apache-airflow-provider-fab>=1.0.0` and use `airflow.providers.fab.auth_manager.security_manager.override.FabAirflowSecurityManagerOverride` instead.

AIR303.py:19:1: AIR303 Import path `airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG` is moved into `celery` provider in Airflow 3.0;
AIR303.py:29:1: AIR303 Import path `airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG` is moved into `celery` provider in Airflow 3.0;
|
17 | FabAirflowSecurityManagerOverride
18 |
19 | DEFAULT_CELERY_CONFIG
28 | # apache-airflow-providers-celery
29 | DEFAULT_CELERY_CONFIG
| ^^^^^^^^^^^^^^^^^^^^^ AIR303
30 | app
|
= help: Install `apache-airflow-provider-celery>=3.3.0` and import from `airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG` instead.

AIR303.py:30:1: AIR303 Import path `airflow.executors.celery_executor.app` is moved into `celery` provider in Airflow 3.0;
|
28 | # apache-airflow-providers-celery
29 | DEFAULT_CELERY_CONFIG
30 | app
| ^^^ AIR303
31 |
32 | # apache-airflow-providers-common-sql
|
= help: Install `apache-airflow-provider-celery>=3.3.0` and import from `airflow.providers.celery.executors.celery_executor_utils.app` instead.

AIR303.py:33:1: AIR303 Import path `airflow.hooks.dbapi.ConnectorProtocol` is moved into `Common SQL` provider in Airflow 3.0;
|
32 | # apache-airflow-providers-common-sql
33 | ConnectorProtocol()
| ^^^^^^^^^^^^^^^^^ AIR303
34 | DbApiHook()
|
= help: Install `apache-airflow-provider-Common SQL>=1.0.0` and import from `airflow.providers.common.sql.hooks.sql.ConnectorProtocol` instead.

AIR303.py:34:1: AIR303 Import path `airflow.hooks.dbapi.DbApiHook` is moved into `Common SQL` provider in Airflow 3.0;
|
32 | # apache-airflow-providers-common-sql
33 | ConnectorProtocol()
34 | DbApiHook()
| ^^^^^^^^^ AIR303
35 |
36 | # apache-airflow-providers-cncf-kubernetes
|
= help: Install `apache-airflow-provider-Common SQL>=1.0.0` and import from `airflow.providers.common.sql.hooks.sql.DbApiHook` instead.

AIR303.py:37:1: AIR303 Import path `airflow.executors.kubernetes_executor_types.ALL_NAMESPACES` is moved into `Kubernetes` provider in Airflow 3.0;
|
36 | # apache-airflow-providers-cncf-kubernetes
37 | ALL_NAMESPACES
| ^^^^^^^^^^^^^^ AIR303
38 | POD_EXECUTOR_DONE_KEY
|
= help: Install `apache-airflow-provider-Kubernetes>=7.4.0` and import from `airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.ALL_NAMESPACES` instead.

AIR303.py:38:1: AIR303 Import path `airflow.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY` is moved into `Kubernetes` provider in Airflow 3.0;
|
36 | # apache-airflow-providers-cncf-kubernetes
37 | ALL_NAMESPACES
38 | POD_EXECUTOR_DONE_KEY
| ^^^^^^^^^^^^^^^^^^^^^ AIR303
39 |
40 | # apache-airflow-providers-apache-hive
|
= help: Install `apache-airflow-provider-Kubernetes>=7.4.0` and import from `airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY` instead.

AIR303.py:41:1: AIR303 Import path `airflow.hooks.hive_hooks.HIVE_QUEUE_PRIORITIES` is moved into `Apache Hive` provider in Airflow 3.0;
|
40 | # apache-airflow-providers-apache-hive
41 | HIVE_QUEUE_PRIORITIES
| ^^^^^^^^^^^^^^^^^^^^^ AIR303
42 | closest_ds_partition()
43 | max_partition()
|
= help: Install `apache-airflow-provider-Apache Hive>=1.0.0` and import from `airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES` instead.

AIR303.py:42:1: AIR303 Import path `airflow.macros.hive.closest_ds_partition` is moved into `Apache Hive` provider in Airflow 3.0;
|
40 | # apache-airflow-providers-apache-hive
41 | HIVE_QUEUE_PRIORITIES
42 | closest_ds_partition()
| ^^^^^^^^^^^^^^^^^^^^ AIR303
43 | max_partition()
|
= help: Install `apache-airflow-provider-Apache Hive>=5.1.0` and import from `airflow.providers.apache.hive.macros.hive.closest_ds_partition` instead.

AIR303.py:43:1: AIR303 Import path `airflow.macros.hive.max_partition` is moved into `Apache Hive` provider in Airflow 3.0;
|
41 | HIVE_QUEUE_PRIORITIES
42 | closest_ds_partition()
43 | max_partition()
| ^^^^^^^^^^^^^ AIR303
|
= help: Install `apache-airflow-provider-Apache Hive>=5.1.0` and import from `airflow.providers.apache.hive.macros.hive.max_partition` instead.
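
For the remaining providers covered by the new snapshot entries, the help texts above translate into imports along these lines. Again a sketch only: it assumes the common-sql, cncf-kubernetes, and apache-hive provider distributions are installed at the minimum versions named in the messages:

# Common SQL provider
from airflow.providers.common.sql.hooks.sql import ConnectorProtocol, DbApiHook

# CNCF Kubernetes provider
from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
    ALL_NAMESPACES,
    POD_EXECUTOR_DONE_KEY,
)

# Apache Hive provider
from airflow.providers.apache.hive.hooks.hive import HIVE_QUEUE_PRIORITIES
from airflow.providers.apache.hive.macros.hive import closest_ds_partition, max_partition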