# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. [build-system] # build dependencies should be fixed - including all transitive dependencies. This way we can ensure # reproducibility of the build and make sure that any future releases of any dependencies will not # break the build of released airflow sources in the future. # The dependencies can be automatically upgraded by running: # pre-commit run --hook-stage manual update-build-dependencies --all-files requires = [ "GitPython==3.1.43", "gitdb==4.0.11", "hatchling==1.25.0", "packaging==24.1", "pathspec==0.12.1", "pluggy==1.5.0", "smmap==5.0.1", "tomli==2.0.1; python_version < '3.11'", "trove-classifiers==2024.7.2", ] build-backend = "hatchling.build" [project] name = "apache-airflow" description = "Programmatically author, schedule and monitor data pipelines" readme = { file = "generated/PYPI_README.md", content-type = "text/markdown" } license-files.globs = ["LICENSE", "3rd-party-licenses/*.txt"] requires-python = "~=3.8,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] maintainers = [ { name = "Apache Software Foundation", email="dev@airflow.apache.org" }, ] keywords = [ "airflow", "orchestration", "workflow", "dag", "pipelines", "automation", "data" ] classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: Web Environment", "Framework :: Apache Airflow", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dynamic = ["version", "optional-dependencies", "dependencies"] # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! # !!! YOU MIGHT BE SURPRISED NOT SEEING THE DEPENDENCIES AS `project.dependencies` !!!!!!!!! # !!! AND EXTRAS AS `project.optional-dependencies` !!!!!!!!! # !!! THEY ARE marked as `dynamic` GENERATED by `hatch_build.py` !!!!!!!!! # !!! SEE COMMENTS BELOW TO FIND WHERE DEPENDENCIES ARE MAINTAINED !!!!!!!!! # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! # # !!!!!! Those providers are defined in `hatch_build.py` and should be maintained there !!!!!!! # # Those extras are available as regular core airflow extras - they install optional features of Airflow. # # START CORE EXTRAS HERE # # aiobotocore, apache-atlas, apache-webhdfs, async, cgroups, cloudpickle, deprecated-api, github- # enterprise, google-auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, pydantic, # rabbitmq, s3fs, saml, sentry, statsd, uv, virtualenv # # END CORE EXTRAS HERE # # The ``devel`` extras are not available in the released packages. They are only available when you install # Airflow from sources in ``editable`` installation - i.e. one that you are usually using to contribute to # Airflow. They provide tools such as ``pytest`` and ``mypy`` for general purpose development and testing. # # START DEVEL EXTRAS HERE # # devel, devel-all-dbs, devel-ci, devel-debuggers, devel-devscripts, devel-duckdb, devel-hadoop, # devel-mypy, devel-sentry, devel-static-checks, devel-tests # # END DEVEL EXTRAS HERE # # Those extras are bundles dynamically generated from other extras. # # START BUNDLE EXTRAS HERE # # all, all-core, all-dbs, devel-all, devel-ci # # END BUNDLE EXTRAS HERE # # The ``doc`` extras are not available in the released packages. They are only available when you install # Airflow from sources in ``editable`` installation - i.e. one that you are usually using to contribute to # Airflow. They provide tools needed when you want to build Airflow documentation (note that you also need # ``devel`` extras installed for airflow and providers in order to build documentation for airflow and # provider packages respectively). The ``doc`` package is enough to build regular documentation, where # ``doc_gen`` is needed to generate ER diagram we have describing our database. # # START DOC EXTRAS HERE # # doc, doc-gen # # END DOC EXTRAS HERE # # The `deprecated` extras are deprecated extras from Airflow 1 that will be removed in future versions. # # START DEPRECATED EXTRAS HERE # # atlas, aws, azure, cassandra, crypto, druid, gcp, gcp-api, hdfs, hive, kubernetes, mssql, pinot, s3, # spark, webhdfs, winrm # # END DEPRECATED EXTRAS HERE # # !!!!!! Those providers are defined in the `airflow/providers//provider.yaml` files !!!!!!! # # Those extras are available as regular Airflow extras, they install provider packages in standard builds # or dependencies that are necessary to enable the feature in editable build. # START PROVIDER EXTRAS HERE # # airbyte, alibaba, amazon, apache.beam, apache.cassandra, apache.drill, apache.druid, apache.flink, # apache.hdfs, apache.hive, apache.iceberg, apache.impala, apache.kafka, apache.kylin, apache.livy, # apache.pig, apache.pinot, apache.spark, apprise, arangodb, asana, atlassian.jira, celery, cloudant, # cncf.kubernetes, cohere, common.compat, common.io, common.sql, databricks, datadog, dbt.cloud, # dingding, discord, docker, elasticsearch, exasol, fab, facebook, ftp, github, google, grpc, # hashicorp, http, imap, influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.psrp, # microsoft.winrm, mongo, mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie, # oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce, # samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, tabular, # telegram, teradata, trino, vertica, weaviate, yandex, ydb, zendesk # # END PROVIDER EXTRAS HERE [project.scripts] airflow = "airflow.__main__:main" [project.urls] "Bug Tracker" = "https://github.com/apache/airflow/issues" Documentation = "https://airflow.apache.org/docs/" Downloads = "https://archive.apache.org/dist/airflow/" Homepage = "https://airflow.apache.org/" "Release Notes" = "https://airflow.apache.org/docs/apache-airflow/stable/release_notes.html" "Slack Chat" = "https://s.apache.org/airflow-slack" "Source Code" = "https://github.com/apache/airflow" Twitter = "https://twitter.com/ApacheAirflow" YouTube = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/" [tool.hatch.envs.default] python = "3.8" platforms = ["linux", "macos"] description = "Default environment with Python 3.8 for maximum compatibility" features = [] [tool.hatch.envs.airflow-38] python = "3.8" platforms = ["linux", "macos"] description = "Environment with Python 3.8. No devel installed." features = [] [tool.hatch.envs.airflow-39] python = "3.9" platforms = ["linux", "macos"] description = "Environment with Python 3.9. No devel installed." features = [] [tool.hatch.envs.airflow-310] python = "3.10" platforms = ["linux", "macos"] description = "Environment with Python 3.10. No devel installed." features = [] [tool.hatch.envs.airflow-311] python = "3.11" platforms = ["linux", "macos"] description = "Environment with Python 3.11. No devel installed" features = [] [tool.hatch.envs.airflow-312] python = "3.12" platforms = ["linux", "macos"] description = "Environment with Python 3.12. No devel installed" features = [] [tool.hatch.version] path = "airflow/__init__.py" [tool.hatch.build.targets.wheel.hooks.custom] path = "./hatch_build.py" [tool.hatch.build.hooks.custom] path = "./hatch_build.py" [tool.hatch.build.targets.custom] path = "./hatch_build.py" [tool.hatch.build.targets.sdist] include = [ "/airflow", "/airflow/git_version" ] exclude = [ "/airflow/providers/", "/airflow/www/node_modules/" ] artifacts = [ "/airflow/www/static/dist/", "/airflow/git_version", "/generated/", ] [tool.hatch.build.targets.wheel] include = [ "/airflow", ] exclude = [ "/airflow/providers/", ] artifacts = [ "/airflow/www/static/dist/", "/airflow/git_version" ] ## black settings ## [tool.black] line-length = 110 target-version = ['py38', 'py39', 'py310', 'py311', 'py312'] ## ruff settings ## [tool.ruff] target-version = "py38" line-length = 110 extend-exclude = [ ".eggs", "*/_vendor/*", # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can # ignore them in ruff "airflow/providers/common/sql/*/*.pyi", "tests/dags/test_imports.py", ] namespace-packages = ["airflow/providers"] [tool.ruff.lint] typing-modules = ["airflow.typing_compat"] extend-select = [ # Enable entire ruff rule section "I", # Missing required import (auto-fixable) "UP", # Pyupgrade "ASYNC", # subset of flake8-async rules "ISC", # Checks for implicit literal string concatenation (auto-fixable) "TCH", # Rules around TYPE_CHECKING blocks "G", # flake8-logging-format rules "LOG", # flake8-logging rules, most of them autofixable "PT", # flake8-pytest-style rules "TID25", # flake8-tidy-imports rules "E", # pycodestyle rules "W", # pycodestyle rules # Per rule enables "RUF006", # Checks for asyncio dangling task "RUF015", # Checks for unnecessary iterable allocation for first element "RUF019", # Checks for unnecessary key check "RUF100", # Unused noqa (auto-fixable) # We ignore more pydocstyle than we enable, so be more selective at what we enable "D1", "D2", "D213", # Conflicts with D212. Both can not be enabled. "D3", "D400", "D401", "D402", "D403", "D412", "D419", "PGH004", # Use specific rule codes when using noqa "PGH005", # Invalid unittest.mock.Mock methods/attributes/properties "S101", # Checks use `assert` outside the test cases, test cases should be added into the exclusions "B004", # Checks for use of hasattr(x, "__call__") and replaces it with callable(x) "B006", # Checks for uses of mutable objects as function argument defaults. "B007", # Checks for unused variables in the loop "B017", # Checks for pytest.raises context managers that catch Exception or BaseException. "B019", # Use of functools.lru_cache or functools.cache on methods can lead to memory leaks "B028", # No explicit stacklevel keyword argument found "TRY002", # Prohibit use of `raise Exception`, use specific exceptions instead. ] ignore = [ "D100", # Unwanted; Docstring at the top of every file. "D102", # TODO: Missing docstring in public method "D103", # TODO: Missing docstring in public function "D104", # Unwanted; Docstring at the top of every `__init__.py` file. "D105", # Unwanted; See https://lists.apache.org/thread/8jbg1dd2lr2cfydtqbjxsd6pb6q2wkc3 "D107", # Unwanted; Docstring in every constructor is unnecessary if the class has a docstring. "D203", "D212", # Conflicts with D213. Both can not be enabled. "D214", "D215", "E731", # Do not assign a lambda expression, use a def "TCH003", # Do not move imports from stdlib to TYPE_CHECKING block "PT004", # Fixture does not return anything, add leading underscore "PT005", # Fixture returns a value, remove leading underscore "PT006", # Wrong type of names in @pytest.mark.parametrize "PT007", # Wrong type of values in @pytest.mark.parametrize "PT011", # pytest.raises() is too broad, set the match parameter "PT019", # fixture without value is injected as parameter, use @pytest.mark.usefixtures instead # Rules below explicitly set off which could overlap with Ruff's formatter # as it recommended by https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules # Except ISC rules "W191", "E111", "E114", "E117", "D206", "D300", "Q000", "Q001", "Q002", "Q003", "COM812", "COM819", "E501", # Formatted code may exceed the line length, leading to line-too-long (E501) errors. ] unfixable = [ # PT022 replace empty `yield` to empty `return`. Might be fixed with a combination of PLR1711 # In addition, it can't do anything with invalid typing annotations, protected by mypy. "PT022", ] [tool.ruff.format] docstring-code-format = true [tool.ruff.lint.isort] required-imports = ["from __future__ import annotations"] combine-as-imports = true [tool.ruff.lint.per-file-ignores] "airflow/__init__.py" = ["F401", "TCH004"] "airflow/models/__init__.py" = ["F401", "TCH004"] "airflow/models/sqla_models.py" = ["F401"] # The test_python.py is needed because adding __future__.annotations breaks runtime checks that are # needed for the test to work "tests/decorators/test_python.py" = ["I002"] # The Pydantic representations of SqlAlchemy Models are not parsed well with Pydantic # when __future__.annotations is used so we need to skip them from upgrading # Pydantic also require models to be imported during execution "airflow/serialization/pydantic/*.py" = ["I002", "UP007", "TCH001"] # Ignore pydoc style from these "*.pyi" = ["D"] "scripts/*" = ["D", "PT"] # In addition ignore pytest specific rules "docs/*" = ["D"] "provider_packages/*" = ["D"] "*/example_dags/*" = ["D"] "chart/*" = ["D"] "dev/*" = ["D"] # In addition, ignore in tests # TID253: Banned top level imports, e.g. pandas, numpy # S101: Use `assert` # TRY002: Use `raise Exception` "dev/perf/*" = ["TID253"] "dev/check_files.py" = ["S101"] "dev/breeze/tests/*" = ["TID253", "S101", "TRY002"] "tests/*" = ["D", "TID253", "S101", "TRY002"] "docker_tests/*" = ["D", "TID253", "S101", "TRY002"] "kubernetes_tests/*" = ["D", "TID253", "S101", "TRY002"] "helm_tests/*" = ["D", "TID253", "S101", "TRY002"] # All of the modules which have an extra license header (i.e. that we copy from another project) need to # ignore E402 -- module level import not at top level "scripts/ci/pre_commit/*.py" = ["E402"] "airflow/api/auth/backend/kerberos_auth.py" = ["E402"] "airflow/security/kerberos.py" = ["E402"] "airflow/security/utils.py" = ["E402"] "tests/providers/common/io/xcom/test_backend.py" = ["E402"] "tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"] "tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py" = ["E402"] "tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py" = ["E402"] "tests/providers/google/cloud/links/test_translate.py" = ["E402"] "tests/providers/google/cloud/operators/test_automl.py"= ["E402"] "tests/providers/google/cloud/operators/test_vertex_ai.py" = ["E402"] "tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py" = ["E402"] "tests/providers/google/cloud/triggers/test_vertex_ai.py" = ["E402"] "tests/providers/openai/hooks/test_openai.py" = ["E402"] "tests/providers/openai/operators/test_openai.py" = ["E402"] "tests/providers/qdrant/hooks/test_qdrant.py" = ["E402"] "tests/providers/qdrant/operators/test_qdrant.py" = ["E402"] "tests/providers/snowflake/operators/test_snowflake_sql.py" = ["E402"] "tests/providers/yandex/*/*.py" = ["E402"] # All the modules which do not follow B028 yet: https://docs.astral.sh/ruff/rules/no-explicit-stacklevel/ "helm_tests/airflow_aux/test_basic_helm_chart.py" = ["B028"] # https://github.com/apache/airflow/issues/39252 "airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] # Test compat imports banned imports to allow testing against older airflow versions "tests/test_utils/compat.py" = ["TID251", "F401"] [tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. ban-relative-imports = "all" # Ban certain modules from being imported at module level, instead requiring # that they're imported lazily (e.g., within a function definition). banned-module-level-imports = ["numpy", "pandas"] [tool.ruff.lint.flake8-tidy-imports.banned-api] # Direct import from the airflow package modules and constraints "airflow.AirflowException".msg = "Use airflow.exceptions.AirflowException instead." "airflow.Dataset".msg = "Use airflow.datasets.Dataset instead." "airflow.PY36".msg = "Use sys.version_info >= (3, 6) instead." "airflow.PY37".msg = "Use sys.version_info >= (3, 7) instead." "airflow.PY38".msg = "Use sys.version_info >= (3, 8) instead." "airflow.PY39".msg = "Use sys.version_info >= (3, 9) instead." "airflow.PY310".msg = "Use sys.version_info >= (3, 10) instead." "airflow.PY311".msg = "Use sys.version_info >= (3, 11) instead." "airflow.PY312".msg = "Use sys.version_info >= (3, 12) instead." # Deprecated imports "airflow.models.baseoperator.BaseOperatorLink".msg = "Use airflow.models.baseoperatorlink.BaseOperatorLink" "airflow.models.errors.ImportError".msg = "Use airflow.models.errors.ParseImportError" "airflow.models.ImportError".msg = "Use airflow.models.errors.ParseImportError" # Deprecated in Python 3.11, Pending Removal in Python 3.15: https://github.com/python/cpython/issues/90817 # Deprecation warning in Python 3.11 also recommends using locale.getencoding but it available in Python 3.11 "locale.getdefaultlocale".msg = "Use locale.setlocale() and locale.getlocale() instead." # Deprecated in Python 3.12: https://github.com/python/cpython/issues/103857 "datetime.datetime.utcnow".msg = "Use airflow.utils.timezone.utcnow or datetime.datetime.now(tz=datetime.timezone.utc)" "datetime.datetime.utcfromtimestamp".msg = "Use airflow.utils.timezone.from_timestamp or datetime.datetime.fromtimestamp(tz=datetime.timezone.utc)" # Deprecated in Python 3.12: https://github.com/python/cpython/issues/94309 "typing.Hashable".msg = "Use collections.abc.Hashable" "typing.Sized".msg = "Use collections.abc.Sized" # Uses deprecated in Python 3.12 `datetime.datetime.utcfromtimestamp` "pendulum.from_timestamp".msg = "Use airflow.utils.timezone.from_timestamp" # Flask deprecations, worthwhile to keep it until we migrate to Flask 3.0+ "flask._app_ctx_stack".msg = "Deprecated in Flask 2.2, removed in Flask 3.0" "flask._request_ctx_stack".msg = "Deprecated in Flask 2.2, removed in Flask 3.0" "flask.escape".msg = "Use markupsafe.escape instead. Deprecated in Flask 2.3, removed in Flask 3.0" "flask.Markup".msg = "Use markupsafe.Markup instead. Deprecated in Flask 2.3, removed in Flask 3.0" "flask.signals_available".msg = "Signals are always available. Deprecated in Flask 2.3, removed in Flask 3.0" # Use root logger by a mistake / IDE autosuggestion # If for some reason root logger required it could obtained by logging.getLogger("root") "logging.debug".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.info".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.warning".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.error".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.exception".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.fatal".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.critical".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" "logging.log".msg = "Instantiate new `logger = logging.getLogger(__name__)` and use it instead of root logger" # unittest related restrictions "unittest.TestCase".msg = "Use pytest compatible classes: https://docs.pytest.org/en/stable/getting-started.html#group-multiple-tests-in-a-class" "unittest.skip".msg = "Use `pytest.mark.skip` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks" "unittest.skipIf".msg = "Use `pytest.mark.skipif` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks" "unittest.skipUnless".msg = "Use `pytest.mark.skipif` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks" "unittest.expectedFailure".msg = "Use `pytest.mark.xfail` instead: https://docs.pytest.org/en/stable/reference/reference.html#marks" # Moved in SQLAlchemy 2.0 "sqlalchemy.ext.declarative.declarative_base".msg = "Use `sqlalchemy.orm.declarative_base`. Moved in SQLAlchemy 2.0" "sqlalchemy.ext.declarative.as_declarative".msg = "Use `sqlalchemy.orm.as_declarative`. Moved in SQLAlchemy 2.0" "sqlalchemy.ext.declarative.has_inherited_table".msg = "Use `sqlalchemy.orm.has_inherited_table`. Moved in SQLAlchemy 2.0" "sqlalchemy.ext.declarative.synonym_for".msg = "Use `sqlalchemy.orm.synonym_for`. Moved in SQLAlchemy 2.0" [tool.ruff.lint.flake8-type-checking] exempt-modules = ["typing", "typing_extensions"] [tool.ruff.lint.flake8-pytest-style] mark-parentheses = false fixture-parentheses = false ## pytest settings ## [tool.pytest.ini_options] addopts = [ "-rasl", "--verbosity=2", # Disable `flaky` plugin for pytest. This plugin conflicts with `rerunfailures` because provide the same marker. "-p", "no:flaky", # Disable `nose` builtin plugin for pytest. This feature is deprecated in 7.2 and will be removed in pytest>=8 "-p", "no:nose", # Disable support of a legacy `LocalPath` in favor of stdlib `pathlib.Path`. "-p", "no:legacypath", # Disable warnings summary, because we use our warning summary. "--disable-warnings", "--asyncio-mode=strict", ] norecursedirs = [ ".eggs", "airflow", "tests/_internals", "tests/dags_with_system_exit", "tests/test_utils", "tests/dags_corrupted", "tests/dags", "tests/system/providers/google/cloud/dataproc/resources", "tests/system/providers/google/cloud/gcs/resources", ] log_level = "INFO" filterwarnings = [ "error::pytest.PytestCollectionWarning", "error::pytest.PytestReturnNotNoneWarning", # Avoid building cartesian product which might impact performance "error:SELECT statement has a cartesian product between FROM:sqlalchemy.exc.SAWarning:airflow", 'error:Coercing Subquery object into a select\(\) for use in IN\(\):sqlalchemy.exc.SAWarning:airflow', 'error:Class.*will not make use of SQL compilation caching', "ignore::DeprecationWarning:flask_appbuilder.filemanager", "ignore::DeprecationWarning:flask_appbuilder.widgets", # FAB do not support SQLAclhemy 2 "ignore::sqlalchemy.exc.MovedIn20Warning:flask_appbuilder", # https://github.com/dpgaspar/Flask-AppBuilder/issues/2194 "ignore::DeprecationWarning:marshmallow_sqlalchemy.convert", # https://github.com/dpgaspar/Flask-AppBuilder/pull/1940 "ignore::DeprecationWarning:flask_sqlalchemy", # https://github.com/dpgaspar/Flask-AppBuilder/pull/1903 "ignore::DeprecationWarning:apispec.utils", # Connexion 2 use different deprecated objects, this should be resolved into Connexion 3 # https://github.com/spec-first/connexion/pull/1536 'ignore::DeprecationWarning:connexion.spec', 'ignore:jsonschema\.RefResolver:DeprecationWarning:connexion.json_schema', 'ignore:jsonschema\.exceptions\.RefResolutionError:DeprecationWarning:connexion.json_schema', 'ignore:Accessing jsonschema\.draft4_format_checker:DeprecationWarning:connexion.decorators.validation', ] # We cannot add warnings from the airflow package into `filterwarnings`, # because it invokes import airflow before we set up test environment which breaks the tests. # Instead of that, we use a separate parameter and dynamically add it into `filterwarnings` marker. forbidden_warnings = [ "airflow.exceptions.RemovedInAirflow3Warning", "airflow.utils.context.AirflowContextDeprecationWarning", "airflow.exceptions.AirflowProviderDeprecationWarning", ] python_files = [ "test_*.py", "example_*.py", ] testpaths = [ "tests", ] # Keep temporary directories (created by `tmp_path`) for 2 recent runs only failed tests. tmp_path_retention_count = "2" tmp_path_retention_policy = "failed" ## coverage.py settings ## [tool.coverage.run] branch = true relative_files = true source = ["airflow"] omit = [ "airflow/_vendor/**", "airflow/contrib/**", "airflow/example_dags/**", "airflow/migrations/**", "airflow/providers/**/example_dags/**", "airflow/www/node_modules/**", "airflow/providers/google/ads/_vendor/**", ] [tool.coverage.report] skip_empty = true exclude_also = [ "def __repr__", "raise AssertionError", "raise NotImplementedError", "if __name__ == .__main__.:", "@(abc\\.)?abstractmethod", "@(typing(_extensions)?\\.)?overload", "if (typing(_extensions)?\\.)?TYPE_CHECKING:" ] ## mypy settings ## [tool.mypy] ignore_missing_imports = true no_implicit_optional = true warn_redundant_casts = true warn_unused_ignores = false plugins = [ "dev/mypy/plugin/decorators.py", "dev/mypy/plugin/outputs.py", ] pretty = true show_error_codes = true disable_error_code = [ "annotation-unchecked", ] [[tool.mypy.overrides]] module="airflow.config_templates.default_webserver_config" disable_error_code = [ "var-annotated", ] [[tool.mypy.overrides]] module="airflow.migrations.*" ignore_errors = true [[tool.mypy.overrides]] module="airflow.*._vendor.*" ignore_errors = true [[tool.mypy.overrides]] module= [ "google.cloud.*", "azure.*", ] no_implicit_optional = false [[tool.mypy.overrides]] module=[ "referencing.*", # Beam has some old type annotations, and they introduced an error recently with bad signature of # a function. This is captured in https://github.com/apache/beam/issues/29927 # and we should remove this exclusion when it is fixed. "apache_beam.*" ] ignore_errors = true