diff --git a/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml b/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml index 707a3dabeaf24..03a19e8c7753e 100644 --- a/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml @@ -106,6 +106,7 @@ body: - snowflake - sqlite - ssh + - standard - tableau - telegram - teradata diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 9b1e2f24ff46a..be62d541f0dea 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -186,6 +186,11 @@ labelPRBasedOnFilePath: - tests/providers/common/sql/**/* - tests/system/providers/common/sql/**/* + provider:standard: + - airflow/providers/standard/**/* + - docs/apache-airflow-providers-standard/**/* + - tests/providers/standard/**/* + provider:databricks: - airflow/providers/databricks/**/* - docs/apache-airflow-providers-databricks/**/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 35e8df5fe8445..204ae2471863f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -546,6 +546,12 @@ repos: description: The core example DAGs have no dependencies other than core Airflow entry: "^\\s*from airflow\\.providers.*" pass_filenames: true + exclude: > + (?x) + ^airflow/example_dags/example_branch_datetime_operator.py| + ^airflow/example_dags/example_branch_day_of_week_operator.py| + ^airflow/example_dags/example_sensors.py| + ^airflow/example_dags/example_time_delta_sensor_async.py files: ^airflow/example_dags/.*\.py$ - id: check-no-airflow-deprecation-in-providers language: pygrep diff --git a/INSTALL b/INSTALL index d9ae8088ee30e..5ccabe2ff3270 100644 --- a/INSTALL +++ b/INSTALL @@ -278,8 +278,8 @@ dingding, discord, docker, edge, elasticsearch, exasol, fab, facebook, ftp, gith hashicorp, http, imap, influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, 
microsoft.psrp, microsoft.winrm, mongo, mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie, oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce, -samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, telegram, -teradata, trino, vertica, weaviate, yandex, ydb, zendesk +samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, standard, tableau, +telegram, teradata, trino, vertica, weaviate, yandex, ydb, zendesk # END PROVIDER EXTRAS HERE diff --git a/airflow/example_dags/example_branch_datetime_operator.py b/airflow/example_dags/example_branch_datetime_operator.py index d24dd6cde4411..e5aaa592cd162 100644 --- a/airflow/example_dags/example_branch_datetime_operator.py +++ b/airflow/example_dags/example_branch_datetime_operator.py @@ -25,8 +25,8 @@ import pendulum from airflow.models.dag import DAG -from airflow.operators.datetime import BranchDateTimeOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.time.operators.datetime import BranchDateTimeOperator dag1 = DAG( dag_id="example_branch_datetime_operator", diff --git a/airflow/example_dags/example_branch_day_of_week_operator.py b/airflow/example_dags/example_branch_day_of_week_operator.py index c532ad712ca8d..ef7caf9cdce70 100644 --- a/airflow/example_dags/example_branch_day_of_week_operator.py +++ b/airflow/example_dags/example_branch_day_of_week_operator.py @@ -25,7 +25,7 @@ from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator -from airflow.operators.weekday import BranchDayOfWeekOperator +from airflow.providers.standard.time.operators.weekday import BranchDayOfWeekOperator from airflow.utils.weekday import WeekDay with DAG( diff --git a/airflow/example_dags/example_sensors.py b/airflow/example_dags/example_sensors.py index 9dbe83d6e4c40..9889695ab3925 100644 --- a/airflow/example_dags/example_sensors.py +++ 
b/airflow/example_dags/example_sensors.py @@ -23,12 +23,12 @@ from airflow.models.dag import DAG from airflow.operators.bash import BashOperator +from airflow.providers.standard.time.sensors.time import TimeSensor, TimeSensorAsync +from airflow.providers.standard.time.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync +from airflow.providers.standard.time.sensors.weekday import DayOfWeekSensor from airflow.sensors.bash import BashSensor from airflow.sensors.filesystem import FileSensor from airflow.sensors.python import PythonSensor -from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync -from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync -from airflow.sensors.weekday import DayOfWeekSensor from airflow.utils.trigger_rule import TriggerRule from airflow.utils.weekday import WeekDay diff --git a/airflow/example_dags/example_time_delta_sensor_async.py b/airflow/example_dags/example_time_delta_sensor_async.py index a2edb00a03103..f8d43beb964fb 100644 --- a/airflow/example_dags/example_time_delta_sensor_async.py +++ b/airflow/example_dags/example_time_delta_sensor_async.py @@ -28,7 +28,7 @@ from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator -from airflow.sensors.time_delta import TimeDeltaSensorAsync +from airflow.providers.standard.time.sensors.time_delta import TimeDeltaSensorAsync with DAG( dag_id="example_time_delta_sensor_async", diff --git a/airflow/providers/standard/CHANGELOG.rst b/airflow/providers/standard/CHANGELOG.rst new file mode 100644 index 0000000000000..c359185f0ca2c --- /dev/null +++ b/airflow/providers/standard/CHANGELOG.rst @@ -0,0 +1,44 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + .. http://www.apache.org/licenses/LICENSE-2.0 + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +.. NOTE TO CONTRIBUTORS: + Please, only add notes to the Changelog just below the "Changelog" header when there are some breaking changes + and you want to add an explanation to the users on how they are supposed to deal with them. + The changelog is updated and maintained semi-automatically by release manager. + +``apache-airflow-providers-standard`` + + +Changelog +--------- + +1.0.0 +..... + +Breaking changes +~~~~~~~~~~~~~~~~ + +* ``In BranchDayOfWeekOperator, DayOfWeekSensor, BranchDateTimeOperator parameter use_task_execution_date has been removed. Please use use_task_logical_date.`` diff --git a/airflow/providers/standard/__init__.py b/airflow/providers/standard/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/standard/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/standard/provider.yaml b/airflow/providers/standard/provider.yaml new file mode 100644 index 0000000000000..3dc047d897e63 --- /dev/null +++ b/airflow/providers/standard/provider.yaml @@ -0,0 +1,52 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +--- +package-name: apache-airflow-providers-standard +name: Standard +description: | + Airflow Standard Provider +state: not-ready +source-date-epoch: 1718603992 + +# note that those versions are maintained by release manager - do not update them manually +versions: + - 1.0.0 + +dependencies: + - apache-airflow>=2.10.0 + +integrations: + - integration-name: Standard + external-doc-url: https://airflow.apache.org/ + tags: [apache] + how-to-guide: + - /docs/apache-airflow-providers-standard/operators.rst + +operators: + - integration-name: Standard + python-modules: + - airflow.providers.standard.time.operators.datetime + - airflow.providers.standard.time.operators.weekday + +sensors: + - integration-name: Standard + python-modules: + - airflow.providers.standard.time.sensors.date_time + - airflow.providers.standard.time.sensors.time_delta + - airflow.providers.standard.time.sensors.time + - airflow.providers.standard.time.sensors.weekday diff --git a/airflow/providers/standard/time/__init__.py b/airflow/providers/standard/time/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/standard/time/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/airflow/providers/standard/time/operators/__init__.py b/airflow/providers/standard/time/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/standard/time/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/operators/datetime.py b/airflow/providers/standard/time/operators/datetime.py similarity index 100% rename from airflow/operators/datetime.py rename to airflow/providers/standard/time/operators/datetime.py diff --git a/airflow/operators/weekday.py b/airflow/providers/standard/time/operators/weekday.py similarity index 100% rename from airflow/operators/weekday.py rename to airflow/providers/standard/time/operators/weekday.py diff --git a/airflow/providers/standard/time/sensors/__init__.py b/airflow/providers/standard/time/sensors/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/standard/time/sensors/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/sensors/date_time.py b/airflow/providers/standard/time/sensors/date_time.py similarity index 100% rename from airflow/sensors/date_time.py rename to airflow/providers/standard/time/sensors/date_time.py diff --git a/airflow/sensors/time_sensor.py b/airflow/providers/standard/time/sensors/time.py similarity index 100% rename from airflow/sensors/time_sensor.py rename to airflow/providers/standard/time/sensors/time.py diff --git a/airflow/sensors/time_delta.py b/airflow/providers/standard/time/sensors/time_delta.py similarity index 100% rename from airflow/sensors/time_delta.py rename to airflow/providers/standard/time/sensors/time_delta.py diff --git a/airflow/sensors/weekday.py b/airflow/providers/standard/time/sensors/weekday.py similarity index 100% rename from airflow/sensors/weekday.py rename to airflow/providers/standard/time/sensors/weekday.py diff --git a/contributing-docs/12_airflow_dependencies_and_extras.rst b/contributing-docs/12_airflow_dependencies_and_extras.rst index 18fdf30e6ee24..16d2f32ee172d 100644 --- a/contributing-docs/12_airflow_dependencies_and_extras.rst +++ b/contributing-docs/12_airflow_dependencies_and_extras.rst @@ -186,8 +186,8 @@ dingding, discord, docker, edge, elasticsearch, exasol, fab, facebook, ftp, gith hashicorp, http, imap, influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.psrp, microsoft.winrm, mongo, mysql, neo4j, 
odbc, openai, openfaas, openlineage, opensearch, opsgenie, oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce, -samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, telegram, -teradata, trino, vertica, weaviate, yandex, ydb, zendesk +samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, standard, tableau, +telegram, teradata, trino, vertica, weaviate, yandex, ydb, zendesk .. END PROVIDER EXTRAS HERE diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg index e5150fbab58f9..1556dfef6f5a7 100644 --- a/dev/breeze/doc/images/output-commands.svg +++ b/dev/breeze/doc/images/output-commands.svg @@ -301,53 +301,53 @@ Usage:breeze[OPTIONSCOMMAND [ARGS]... ╭─ Execution mode ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. +--python-pPython major/minor version used in Airflow image for images. (>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           [default: 3.8]                                               ---integrationIntegration(s) to enable when running (can be more than one).                        +--integrationIntegration(s) to enable when running (can be more than one).                        (all | all-testable | cassandra | celery | drill | kafka | kerberos | mongo | mssql  | openlineage | otel | pinot | qdrant | redis | statsd | trino | ydb)                ---standalone-dag-processorRun standalone dag processor for start-airflow. ---database-isolationRun airflow in database isolation mode. +--standalone-dag-processorRun standalone dag processor for start-airflow. +--database-isolationRun airflow in database isolation mode. 
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Docker Compose selection and cleanup ───────────────────────────────────────────────────────────────────────────────╮ ---project-nameName of the docker-compose project to bring down. The `docker-compose` is for legacy breeze        -project name and you can use `breeze down --project-name docker-compose` to stop all containers    +--project-nameName of the docker-compose project to bring down. The `docker-compose` is for legacy breeze        +project name and you can use `breeze down --project-name docker-compose` to stop all containers    belonging to it.                                                                                   (breeze | pre-commit | docker-compose)                                                             [default: breeze]                                                                                  ---docker-hostOptional - docker host to use when running docker commands. When set, the `--builder` option is    +--docker-hostOptional - docker host to use when running docker commands. When set, the `--builder` option is    ignored when building images.                                                                      (TEXT)                                                                                             ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Database ───────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---backend-bDatabase backend to use. If 'none' is chosen, Breeze will start with an invalid database     +--backend-bDatabase backend to use. If 'none' is chosen, Breeze will start with an invalid database     configuration, meaning there will be no database available, and any attempts to connect to   the Airflow database will fail.                                 
                             (>sqlite< | mysql | postgres | none)                                                         [default: sqlite]                                                                            ---postgres-version-PVersion of Postgres used.(>12< | 13 | 14 | 15 | 16)[default: 12] ---mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0] ---db-reset-dReset DB when entering the container. +--postgres-version-PVersion of Postgres used.(>12< | 13 | 14 | 15 | 16)[default: 12] +--mysql-version-MVersion of MySQL used.(>8.0< | 8.4)[default: 8.0] +--db-reset-dReset DB when entering the container. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Build CI image (before entering shell) ─────────────────────────────────────────────────────────────────────────────╮ ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---builderBuildx builder used to perform `docker buildx build` commands.(TEXT) +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--builderBuildx builder used to perform `docker buildx build` commands.(TEXT) [default: autodetect]                                          ---use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: use-uv] ---uv-http-timeoutTimeout for requests that UV makes (only used in case of UV builds).(INTEGER RANGE) +--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: use-uv] +--uv-http-timeoutTimeout for requests that UV makes (only used in case of UV builds).(INTEGER RANGE) [default: 300; x>=1]                                                 ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Other options 
──────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---forward-credentials-fForward local credentials to container when running. ---max-timeMaximum time that the command should take - if it takes longer, the command will fail. +--forward-credentials-fForward local credentials to container when running. +--max-timeMaximum time that the command should take - if it takes longer, the command will fail. (INTEGER RANGE)                                                                        ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Developer commands ─────────────────────────────────────────────────────────────────────────────────────────────────╮ start-airflow          Enter breeze environment and starts all Airflow components in the tmux session. 
Compile     diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg index 8fb52ec33922c..0ddded9468a55 100644 --- a/dev/breeze/doc/images/output_build-docs.svg +++ b/dev/breeze/doc/images/output_build-docs.svg @@ -198,37 +198,37 @@ jenkins | microsoft.azure | microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres |   presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake |    -sqlite | ssh | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...                 +sqlite | ssh | standard | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...      Build documents. ╭─ Doc flags ──────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---docs-only-dOnly build documentation. ---spellcheck-only-sOnly run spell checking. ---clean-buildClean inventories of Inter-Sphinx documentation and generated APIs and sphinx     +--docs-only-dOnly build documentation. +--spellcheck-only-sOnly run spell checking. +--clean-buildClean inventories of Inter-Sphinx documentation and generated APIs and sphinx     artifacts before the build - useful for a clean build.                            ---one-pass-onlyBuilds documentation in one pass only. This is useful for debugging sphinx        +--one-pass-onlyBuilds documentation in one pass only. This is useful for debugging sphinx        errors.                                                                           ---package-filterFilter(s) to use more than one can be specified. You can use glob pattern         +--package-filterFilter(s) to use more than one can be specified. 
You can use glob pattern         matching the full package name, for example `apache-airflow-providers-*`. Useful  when you want to selectseveral similarly named packages together.                 (TEXT)                                                                            ---include-not-ready-providersWhether to include providers that are not yet ready to be released. ---include-removed-providersWhether to include providers that are removed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---builderBuildx builder used to perform `docker buildx build` commands.(TEXT) +--include-not-ready-providersWhether to include providers that are not yet ready to be released. +--include-removed-providersWhether to include providers that are removed. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--builderBuildx builder used to perform `docker buildx build` commands.(TEXT) [default: autodetect]                                          ---package-listOptional, contains comma-separated list of package ids that are processed for     +--package-listOptional, contains comma-separated list of package ids that are processed for     documentation building, and document publishing. It is an easier alternative to   adding individual packages as arguments to every command. This overrides the      packages passed as arguments.                                                     (TEXT)                                                                            ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. 
---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_build-docs.txt b/dev/breeze/doc/images/output_build-docs.txt index 8a3bc4349b459..4bebe1e163985 100644 --- a/dev/breeze/doc/images/output_build-docs.txt +++ b/dev/breeze/doc/images/output_build-docs.txt @@ -1 +1 @@ -767cdd5028d6ac43dd9f2804e0501ee8 +03dd58933b63fc368157f716b1852e1b diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.svg b/dev/breeze/doc/images/output_release-management_add-back-references.svg index 65297fbbc1dcc..b2b90c9e28728 100644 --- a/dev/breeze/doc/images/output_release-management_add-back-references.svg +++ b/dev/breeze/doc/images/output_release-management_add-back-references.svg @@ -146,19 +146,19 @@ jenkins | microsoft.azure | microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres |   presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake |    -sqlite | ssh | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...                 +sqlite | ssh | standard | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...      Command to add back references for documentation to make it backward compatible. 
╭─ Add Back References to Docs ────────────────────────────────────────────────────────────────────────────────────────╮ -*--airflow-site-directory-aLocal directory path of cloned airflow-site repo.(DIRECTORY)[required] ---include-not-ready-providersWhether to include providers that are not yet ready to be released. ---include-removed-providersWhether to include providers that are removed. +*--airflow-site-directory-aLocal directory path of cloned airflow-site repo.(DIRECTORY)[required] +--include-not-ready-providersWhether to include providers that are not yet ready to be released. +--include-removed-providersWhether to include providers that are removed. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. 
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.txt b/dev/breeze/doc/images/output_release-management_add-back-references.txt index c198abfcb81ac..a3982ff0fa67a 100644 --- a/dev/breeze/doc/images/output_release-management_add-back-references.txt +++ b/dev/breeze/doc/images/output_release-management_add-back-references.txt @@ -1 +1 @@ -743a6e2ad304078a210877279db4546a +33a9bed85312c80e1464318d1aaa5632 diff --git a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg index 62ed9b25dda68..f07aa12760d92 100644 --- a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg +++ b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg @@ -151,16 +151,16 @@ Generates content for issue to test the release. ╭─ Generate issue content flags ───────────────────────────────────────────────────────────────────────────────────────╮ ---disable-progressDisable progress bar ---excluded-pr-listComa-separated list of PRs to exclude from the issue.(TEXT) ---github-tokenGitHub token used to authenticate. You can set omit it if you have GITHUB_TOKEN env      +--disable-progressDisable progress bar +--excluded-pr-listComa-separated list of PRs to exclude from the issue.(TEXT) +--github-tokenGitHub token used to authenticate. You can set omit it if you have GITHUB_TOKEN env      variable set. 
Can be generated with:                                                     https://github.com/settings/tokens/new?description=Read%20sssues&scopes=repo:status      (TEXT)                                                                                   ---only-available-in-distOnly consider package ids with packages prepared in the dist folder +--only-available-in-distOnly consider package ids with packages prepared in the dist folder ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---help-hShow this message and exit. +--help-hShow this message and exit. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg index 6bf07f9bc7f3d..19a549cc5a955 100644 --- a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg +++ b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg @@ -187,28 +187,28 @@ Prepare CHANGELOG, README and COMMITS information for providers. 
╭─ Provider documentation preparation flags ───────────────────────────────────────────────────────────────────────────╮ ---base-branchBase branch to use as diff for documentation generation (used for releasing from  +--base-branchBase branch to use as diff for documentation generation (used for releasing from  old branch)                                                                       (TEXT)                                                                            ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---include-not-ready-providersWhether to include providers that are not yet ready to be released. ---include-removed-providersWhether to include providers that are removed. ---non-interactiveRun in non-interactive mode. Provides random answers to the type of changes and   +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--include-not-ready-providersWhether to include providers that are not yet ready to be released. +--include-removed-providersWhether to include providers that are removed. +--non-interactiveRun in non-interactive mode. Provides random answers to the type of changes and   confirms releasefor providers prepared for release - useful to test the script in non-interactive mode in CI.                                                       ---only-min-version-updateOnly update minimum version in __init__.py files and regenerate corresponding     +--only-min-version-updateOnly update minimum version in __init__.py files and regenerate corresponding     documentation                                                                     ---reapply-templates-onlyOnly reapply templates, do not bump version. Useful if templates were added and   +--reapply-templates-onlyOnly reapply templates, do not bump version. Useful if templates were added and   you need to regenerate documentation.                                             
---skip-git-fetchSkips removal and recreation of `apache-https-for-providers` remote in git. By    +--skip-git-fetchSkips removal and recreation of `apache-https-for-providers` remote in git. By    default, the remote is recreated and fetched to make sure that it's up to date    and that recent commits are not missing                                           ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg index 3c4e468aba0c7..16e211ec4202d 100644 --- a/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg +++ b/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg @@ -187,28 +187,28 @@ Prepare sdist/whl packages of Airflow Providers. ╭─ Package flags ──────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---clean-distClean dist directory before building packages. Useful when you want to build    +--clean-distClean dist directory before building packages. 
Useful when you want to build    multiple packages  in a clean environment                                       ---github-repository-gGitHub repository used to pull, push run images.(TEXT) +--github-repository-gGitHub repository used to pull, push run images.(TEXT) [default: apache/airflow]                        ---include-not-ready-providersWhether to include providers that are not yet ready to be released. ---include-removed-providersWhether to include providers that are removed. ---package-formatFormat of packages.(wheel | sdist | both)[default: wheel] ---package-list-fileRead list of packages from text file (one package per line).(FILENAME) ---skip-deleting-generated-filesSkip deleting files that were used to generate provider package. Useful for     +--include-not-ready-providersWhether to include providers that are not yet ready to be released. +--include-removed-providersWhether to include providers that are removed. +--package-formatFormat of packages.(wheel | sdist | both)[default: wheel] +--package-list-fileRead list of packages from text file (one package per line).(FILENAME) +--skip-deleting-generated-filesSkip deleting files that were used to generate provider package. Useful for     debugging and developing changes to the build process.                          ---skip-tag-checkSkip checking if the tag already exists in the remote repository ---version-suffix-for-pypiVersion suffix used for PyPI packages (alpha, beta, rc1, etc.).(TEXT) ---package-listOptional, contains comma-separated list of package ids that are processed for   +--skip-tag-checkSkip checking if the tag already exists in the remote repository +--version-suffix-for-pypiVersion suffix used for PyPI packages (alpha, beta, rc1, etc.).(TEXT) +--package-listOptional, contains comma-separated list of package ids that are processed for   documentation building, and document publishing. It is an easier alternative to adding individual packages as arguments to every command. 
This overrides the    packages passed as arguments.                                                   (TEXT)                                                                          ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.svg b/dev/breeze/doc/images/output_release-management_publish-docs.svg index 95d455d7b039c..db974991886e7 100644 --- a/dev/breeze/doc/images/output_release-management_publish-docs.svg +++ b/dev/breeze/doc/images/output_release-management_publish-docs.svg @@ -203,38 +203,38 @@ jenkins | microsoft.azure | microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres |   presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake |    -sqlite | ssh | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...                 +sqlite | ssh | standard | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...      
Command to publish generated documentation to airflow-site ╭─ Publish Docs ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ -*--airflow-site-directory-aLocal directory path of cloned airflow-site repo.(DIRECTORY)[required] ---include-not-ready-providersWhether to include providers that are not yet ready to be released. ---include-removed-providersWhether to include providers that are removed. ---override-versioned-sOverrides versioned directories. ---package-filterFilter(s) to use more than one can be specified. You can use glob pattern      +*--airflow-site-directory-aLocal directory path of cloned airflow-site repo.(DIRECTORY)[required] +--include-not-ready-providersWhether to include providers that are not yet ready to be released. +--include-removed-providersWhether to include providers that are removed. +--override-versioned-sOverrides versioned directories. +--package-filterFilter(s) to use more than one can be specified. You can use glob pattern      matching the full package name, for example `apache-airflow-providers-*`.      Useful when you want to selectseveral similarly named packages together.       (TEXT)                                                                         ---package-listOptional, contains comma-separated list of package ids that are processed for  +--package-listOptional, contains comma-separated list of package ids that are processed for  documentation building, and document publishing. It is an easier alternative   to adding individual packages as arguments to every command. This overrides    the packages passed as arguments.                                              
(TEXT)                                                                         ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). ---parallelismMaximum number of processes to use while running the operation in parallel. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +--parallelismMaximum number of processes to use while running the operation in parallel. (INTEGER RANGE)                                                             [default: 4; 1<=x<=8]                                                       ---run-in-parallelRun the operation in parallel on all or selected subset of parameters. ---skip-cleanupSkip cleanup of temporary files created during parallel run. +--run-in-parallelRun the operation in parallel on all or selected subset of parameters. +--skip-cleanupSkip cleanup of temporary files created during parallel run. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---help-hShow this message and exit. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--help-hShow this message and exit. 
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.txt b/dev/breeze/doc/images/output_release-management_publish-docs.txt index 1d94b684f6a01..7496e4e0af745 100644 --- a/dev/breeze/doc/images/output_release-management_publish-docs.txt +++ b/dev/breeze/doc/images/output_release-management_publish-docs.txt @@ -1 +1 @@ -d360bdbf659b84e202be9c8ac76610e5 +71bdb81ad79b6f928c0324d9f39af6ae diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg index 9d57216d0fc43..4f29325c201c1 100644 --- a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg +++ b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg @@ -183,9 +183,9 @@ Generate requirements for selected provider. ╭─ Generate provider requirements flags ───────────────────────────────────────────────────────────────────────────────╮ ---pythonPython version to update sbom from. (defaults to all historical python versions) +--pythonPython version to update sbom from. 
(defaults to all historical python versions) (3.6 | 3.7 | 3.8 | 3.9 | 3.10 | 3.11 | 3.12)                                     ---provider-idProvider id to generate the requirements for                                                   +--provider-idProvider id to generate the requirements for                                                   (airbyte | alibaba | amazon | apache.beam | apache.cassandra | apache.drill | apache.druid |   apache.flink | apache.hdfs | apache.hive | apache.iceberg | apache.impala | apache.kafka |     apache.kylin | apache.livy | apache.pig | apache.pinot | apache.spark | apprise | arangodb |   @@ -196,27 +196,27 @@ microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch  | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp |        -snowflake | sqlite | ssh | tableau | telegram | teradata | trino | vertica | weaviate | yandex -| ydb | zendesk)                                                                               ---provider-versionProvider version to generate the requirements for i.e `2.1.0`. `latest` is also a supported    +snowflake | sqlite | ssh | standard | tableau | telegram | teradata | trino | vertica |        +weaviate | yandex | ydb | zendesk)                                                             +--provider-versionProvider version to generate the requirements for i.e `2.1.0`. `latest` is also a supported    value to account for the most recent version of the provider                                   (TEXT)                                                                                         ---forceForce update providers requirements even if they already exist. +--forceForce update providers requirements even if they already exist. 
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of parameters. ---parallelismMaximum number of processes to use while running the operation in parallel. +--run-in-parallelRun the operation in parallel on all or selected subset of parameters. +--parallelismMaximum number of processes to use while running the operation in parallel. (INTEGER RANGE)                                                             [default: 4; 1<=x<=8]                                                       ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. 
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt index f913a56a5b80c..15460f20215c5 100644 --- a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt +++ b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt @@ -1 +1 @@ -483ab08cf0222a2966510cd93945537f +898569c394d60dbb021f0599b9fb7c82 diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index d3d4ad20ab061..17e471c747f1a 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -509,13 +509,13 @@ def get_airflow_extras(): { "python-version": "3.8", "airflow-version": "2.8.4", - "remove-providers": "cloudant fab edge", + "remove-providers": "cloudant fab edge standard", "run-tests": "true", }, { "python-version": "3.8", "airflow-version": "2.9.3", - "remove-providers": "cloudant edge", + "remove-providers": "cloudant edge standard", "run-tests": "true", }, { diff --git a/docs/apache-airflow-providers-standard/changelog.rst b/docs/apache-airflow-providers-standard/changelog.rst new file mode 100644 index 0000000000000..3d9d5b25f5540 --- /dev/null +++ b/docs/apache-airflow-providers-standard/changelog.rst @@ -0,0 +1,18 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. include:: ../../airflow/providers/standard/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-standard/commits.rst b/docs/apache-airflow-providers-standard/commits.rst new file mode 100644 index 0000000000000..09273ef7e34b7 --- /dev/null +++ b/docs/apache-airflow-providers-standard/commits.rst @@ -0,0 +1,21 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + .. THIS FILE IS UPDATED AUTOMATICALLY_AT_RELEASE_TIME + +Package apache-airflow-providers-standard +------------------------------------------------------ diff --git a/docs/apache-airflow-providers-standard/index.rst b/docs/apache-airflow-providers-standard/index.rst new file mode 100644 index 0000000000000..eb60662df9d8f --- /dev/null +++ b/docs/apache-airflow-providers-standard/index.rst @@ -0,0 +1,90 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-standard`` +=========================================== + + +.. toctree:: + :hidden: + :maxdepth: 1 + :caption: Basics + + Home + Changelog + Security + +.. toctree:: + :hidden: + :maxdepth: 1 + :caption: Guides + + Operators + Sensors + +.. toctree:: + :hidden: + :maxdepth: 1 + :caption: Resources + + PyPI Repository + Installing from sources + Python API <_api/airflow/providers/standard/index> + + + +.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! + + +.. toctree:: + :hidden: + :maxdepth: 1 + :caption: Commits + + Detailed list of commits + + +apache-airflow-providers-standard package +------------------------------------------------------ + + +Release: 1.0.0 + +Provider package +---------------- + +This package is for the ``standard`` provider. +All classes for this package are included in the ``airflow.providers.standard`` python package. + +Installation +------------ + +You can install this package on top of an existing Airflow 2 installation via +``pip install apache-airflow-providers-standard``. +For the minimum Airflow version supported, see ``Requirements`` below. + +Requirements +------------ + +The minimum Apache Airflow version supported by this provider package is ``2.8.0``. 
+ +================== ================== +PIP package Version required +================== ================== +``apache-airflow`` ``>=2.8.0`` +================== ================== diff --git a/docs/apache-airflow-providers-standard/installing-providers-from-sources.rst b/docs/apache-airflow-providers-standard/installing-providers-from-sources.rst new file mode 100644 index 0000000000000..b4e730f4ff21a --- /dev/null +++ b/docs/apache-airflow-providers-standard/installing-providers-from-sources.rst @@ -0,0 +1,18 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. include:: ../exts/includes/installing-providers-from-sources.rst diff --git a/docs/apache-airflow/howto/operator/datetime.rst b/docs/apache-airflow-providers-standard/operators.rst similarity index 98% rename from docs/apache-airflow/howto/operator/datetime.rst rename to docs/apache-airflow-providers-standard/operators.rst index 53485f47cded8..06bd20894c010 100644 --- a/docs/apache-airflow/howto/operator/datetime.rst +++ b/docs/apache-airflow-providers-standard/operators.rst @@ -15,7 +15,8 @@ specific language governing permissions and limitations under the License. - +.. contents:: Table of Contents + :depth: 2 .. 
_howto/operator:BranchDateTimeOperator: diff --git a/docs/apache-airflow-providers-standard/security.rst b/docs/apache-airflow-providers-standard/security.rst new file mode 100644 index 0000000000000..afa13dac6fc9b --- /dev/null +++ b/docs/apache-airflow-providers-standard/security.rst @@ -0,0 +1,18 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. include:: ../exts/includes/security.rst diff --git a/docs/apache-airflow/howto/operator/time.rst b/docs/apache-airflow-providers-standard/sensors.rst similarity index 60% rename from docs/apache-airflow/howto/operator/time.rst rename to docs/apache-airflow-providers-standard/sensors.rst index 3d1e414a5c490..77514dfbe86cf 100644 --- a/docs/apache-airflow/howto/operator/time.rst +++ b/docs/apache-airflow-providers-standard/sensors.rst @@ -15,14 +15,15 @@ specific language governing permissions and limitations under the License. - +.. contents:: Table of Contents + :depth: 2 .. _howto/operator:TimeDeltaSensor: TimeDeltaSensor =============== -Use the :class:`~airflow.sensors.time_delta.TimeDeltaSensor` to end sensing after specific time. +Use the :class:`~airflow.providers.standard.sensors.time_delta.TimeDeltaSensor` to end sensing after specific time. .. 
exampleinclude:: /../../airflow/example_dags/example_sensors.py @@ -37,7 +38,7 @@ Use the :class:`~airflow.sensors.time_delta.TimeDeltaSensor` to end sensing afte TimeDeltaSensorAsync ==================== -Use the :class:`~airflow.sensors.time_delta.TimeDeltaSensorAsync` to end sensing after specific time. +Use the :class:`~airflow.providers.standard.sensors.time_delta.TimeDeltaSensorAsync` to end sensing after specific time. It is an async version of the operator and requires Triggerer to run. @@ -54,7 +55,7 @@ It is an async version of the operator and requires Triggerer to run. TimeSensor ========== -Use the :class:`~airflow.sensors.time_sensor.TimeSensor` to end sensing after time specified. +Use the :class:`~airflow.providers.standard.sensors.time_sensor.TimeSensor` to end sensing after time specified. .. exampleinclude:: /../../airflow/example_dags/example_sensors.py :language: python @@ -68,7 +69,7 @@ Use the :class:`~airflow.sensors.time_sensor.TimeSensor` to end sensing after ti TimeSensorAsync =============== -Use the :class:`~airflow.sensors.time_sensor.TimeSensorAsync` to end sensing after time specified. +Use the :class:`~airflow.providers.standard.sensors.time_sensor.TimeSensorAsync` to end sensing after time specified. It is an async version of the operator and requires Triggerer to run. .. exampleinclude:: /../../airflow/example_dags/example_sensors.py @@ -76,3 +77,29 @@ It is an async version of the operator and requires Triggerer to run. :dedent: 4 :start-after: [START example_time_sensors_async] :end-before: [END example_time_sensors_async] + +.. _howto/operator:BranchDayOfWeekOperator: + +BranchDayOfWeekOperator +======================= + +Use the :class:`~airflow.operators.weekday.BranchDayOfWeekOperator` to branch your workflow based on week day value. + +.. 
exampleinclude:: /../../airflow/example_dags/example_branch_day_of_week_operator.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_day_of_week_branch] + :end-before: [END howto_operator_day_of_week_branch] + +.. _howto/operator:DayOfWeekSensor: + +DayOfWeekSensor +=============== + +Use the :class:`~airflow.sensors.weekday.DayOfWeekSensor` to sense for day of week. + +.. exampleinclude:: /../../airflow/example_dags/example_sensors.py + :language: python + :dedent: 4 + :start-after: [START example_day_of_week_sensor] + :end-before: [END example_day_of_week_sensor] diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst index 4ccccbdfb7380..219709dd1da28 100644 --- a/docs/apache-airflow/extra-packages-ref.rst +++ b/docs/apache-airflow/extra-packages-ref.rst @@ -81,6 +81,8 @@ python dependencies for the provided package. +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | saml | ``pip install 'apache-airflow[saml]'`` | Support for SAML authentication in Airflow | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| standard | ``pip install apache-airflow[standard]'`` | Standard hooks and operators | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | statsd | ``pip install 'apache-airflow[statsd]'`` | Needed by StatsD metrics | +---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ | uv | ``pip install 'apache-airflow[uv]'`` | Install uv - fast, Rust-based package installer (experimental) | diff --git a/docs/apache-airflow/howto/operator/index.rst 
b/docs/apache-airflow/howto/operator/index.rst index 2ca7a5e96579a..e69f0fd19a660 100644 --- a/docs/apache-airflow/howto/operator/index.rst +++ b/docs/apache-airflow/howto/operator/index.rst @@ -30,9 +30,6 @@ determine what actually executes when your DAG runs. :maxdepth: 2 bash - datetime file python - time - weekday external_task_sensor diff --git a/docs/apache-airflow/howto/operator/weekday.rst b/docs/apache-airflow/howto/operator/weekday.rst deleted file mode 100644 index 5a823e276bb81..0000000000000 --- a/docs/apache-airflow/howto/operator/weekday.rst +++ /dev/null @@ -1,44 +0,0 @@ - .. Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - .. http://www.apache.org/licenses/LICENSE-2.0 - - .. Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. - - - -.. _howto/operator:BranchDayOfWeekOperator: - -BranchDayOfWeekOperator -======================= - -Use the :class:`~airflow.operators.weekday.BranchDayOfWeekOperator` to branch your workflow based on week day value. - -.. exampleinclude:: /../../airflow/example_dags/example_branch_day_of_week_operator.py - :language: python - :dedent: 4 - :start-after: [START howto_operator_day_of_week_branch] - :end-before: [END howto_operator_day_of_week_branch] - -.. 
_howto/operator:DayOfWeekSensor: - -DayOfWeekSensor -=============== - -Use the :class:`~airflow.sensors.weekday.DayOfWeekSensor` to sense for day of week. - -.. exampleinclude:: /../../airflow/example_dags/example_sensors.py - :language: python - :dedent: 4 - :start-after: [START example_day_of_week_sensor] - :end-before: [END example_day_of_week_sensor] diff --git a/docs/apache-airflow/operators-and-hooks-ref.rst b/docs/apache-airflow/operators-and-hooks-ref.rst index 6742559a02304..16b74305a958b 100644 --- a/docs/apache-airflow/operators-and-hooks-ref.rst +++ b/docs/apache-airflow/operators-and-hooks-ref.rst @@ -56,9 +56,6 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. * - :mod:`airflow.operators.branch` - - * - :mod:`airflow.operators.datetime` - - :doc:`How to use ` - * - :mod:`airflow.operators.empty` - @@ -88,9 +85,6 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. * - :mod:`airflow.sensors.bash` - :ref:`How to use ` - * - :mod:`airflow.sensors.date_time` - - :doc:`How to use ` - * - :mod:`airflow.sensors.external_task` - :doc:`How to use ` @@ -100,12 +94,6 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. 
* - :mod:`airflow.sensors.python` - :ref:`How to use ` - * - :mod:`airflow.sensors.time_delta` - - :ref:`How to use ` - - * - :mod:`airflow.sensors.time_sensor` - - :ref:`How to use ` - * - :mod:`airflow.sensors.weekday` - :ref:`How to use ` diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 0023c18cd0c08..766efc8a44f8d 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -1261,6 +1261,16 @@ "excluded-python-versions": [], "state": "ready" }, + "standard": { + "deps": [ + "apache-airflow>=2.10.0" + ], + "devel-deps": [], + "plugins": [], + "cross-providers-deps": [], + "excluded-python-versions": [], + "state": "not-ready" + }, "tableau": { "deps": [ "apache-airflow>=2.8.0", diff --git a/pyproject.toml b/pyproject.toml index 8000249e5be33..dff78de57687b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -139,8 +139,8 @@ dynamic = ["version", "optional-dependencies", "dependencies"] # hashicorp, http, imap, influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.psrp, # microsoft.winrm, mongo, mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie, # oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce, -# samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, telegram, -# teradata, trino, vertica, weaviate, yandex, ydb, zendesk +# samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, standard, tableau, +# telegram, teradata, trino, vertica, weaviate, yandex, ydb, zendesk # # END PROVIDER EXTRAS HERE diff --git a/tests/dags/test_sensor.py b/tests/dags/test_sensor.py index fd8dbe6634a49..b922ab2e52703 100644 --- a/tests/dags/test_sensor.py +++ b/tests/dags/test_sensor.py @@ -20,7 +20,7 @@ from airflow.decorators import task from airflow.models.dag import DAG -from airflow.sensors.date_time import DateTimeSensor +from 
airflow.providers.standard.time.sensors.date_time import DateTimeSensor from airflow.utils import timezone with DAG( diff --git a/tests/providers/standard/__init__.py b/tests/providers/standard/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/standard/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/standard/time/__init__.py b/tests/providers/standard/time/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/standard/time/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/standard/time/operators/__init__.py b/tests/providers/standard/time/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/standard/time/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/tests/operators/test_datetime.py b/tests/providers/standard/time/operators/test_datetime.py similarity index 99% rename from tests/operators/test_datetime.py rename to tests/providers/standard/time/operators/test_datetime.py index dfee986f66799..9250ae147bf3b 100644 --- a/tests/operators/test_datetime.py +++ b/tests/providers/standard/time/operators/test_datetime.py @@ -25,8 +25,8 @@ from airflow.exceptions import AirflowException from airflow.models.dagrun import DagRun from airflow.models.taskinstance import TaskInstance as TI -from airflow.operators.datetime import BranchDateTimeOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.time.operators.datetime import BranchDateTimeOperator from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State diff --git a/tests/operators/test_weekday.py b/tests/providers/standard/time/operators/test_weekday.py similarity index 99% rename from tests/operators/test_weekday.py rename to tests/providers/standard/time/operators/test_weekday.py index 230ef92777aa3..338a6217cac64 100644 --- a/tests/operators/test_weekday.py +++ b/tests/providers/standard/time/operators/test_weekday.py @@ -27,7 +27,7 @@ from airflow.models.taskinstance import TaskInstance as TI from airflow.models.xcom import XCom from airflow.operators.empty import EmptyOperator -from airflow.operators.weekday import BranchDayOfWeekOperator +from airflow.providers.standard.time.operators.weekday import BranchDayOfWeekOperator from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State diff --git a/tests/providers/standard/time/sensors/__init__.py b/tests/providers/standard/time/sensors/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/standard/time/sensors/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under 
one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/sensors/test_date_time.py b/tests/providers/standard/time/sensors/test_date_time.py similarity index 95% rename from tests/sensors/test_date_time.py rename to tests/providers/standard/time/sensors/test_date_time.py index edfd8f64aeb28..10ffe42f631b8 100644 --- a/tests/sensors/test_date_time.py +++ b/tests/providers/standard/time/sensors/test_date_time.py @@ -22,7 +22,7 @@ import pytest from airflow.models.dag import DAG -from airflow.sensors.date_time import DateTimeSensor +from airflow.providers.standard.time.sensors.date_time import DateTimeSensor from airflow.utils import timezone DEFAULT_DATE = timezone.datetime(2015, 1, 1) @@ -84,7 +84,7 @@ def test_invalid_input(self): ], ) @patch( - "airflow.sensors.date_time.timezone.utcnow", + "airflow.providers.standard.time.sensors.date_time.timezone.utcnow", return_value=timezone.datetime(2020, 1, 1, 23, 0, tzinfo=timezone.utc), ) def test_poke(self, mock_utcnow, task_id, target_time, expected): diff --git a/tests/sensors/test_time_sensor.py b/tests/providers/standard/time/sensors/test_time.py similarity index 97% rename from tests/sensors/test_time_sensor.py rename to tests/providers/standard/time/sensors/test_time.py index 7919346a61d34..6ef183ff6c7b0 100644 --- 
a/tests/sensors/test_time_sensor.py +++ b/tests/providers/standard/time/sensors/test_time.py @@ -25,7 +25,7 @@ from airflow.exceptions import TaskDeferred from airflow.models.dag import DAG -from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync +from airflow.providers.standard.time.sensors.time import TimeSensor, TimeSensorAsync from airflow.triggers.temporal import DateTimeTrigger from airflow.utils import timezone diff --git a/tests/sensors/test_time_delta.py b/tests/providers/standard/time/sensors/test_time_delta.py similarity index 94% rename from tests/sensors/test_time_delta.py rename to tests/providers/standard/time/sensors/test_time_delta.py index 408a3c88287dd..4f2b202f1ad54 100644 --- a/tests/sensors/test_time_delta.py +++ b/tests/providers/standard/time/sensors/test_time_delta.py @@ -26,7 +26,11 @@ from airflow.models import DagBag from airflow.models.dag import DAG -from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync, WaitSensor +from airflow.providers.standard.time.sensors.time_delta import ( + TimeDeltaSensor, + TimeDeltaSensorAsync, + WaitSensor, +) from airflow.utils.timezone import datetime pytestmark = pytest.mark.db_test @@ -77,7 +81,7 @@ def test_timedelta_sensor(self, defer_mock, should_defer): [False, True], ) @mock.patch("airflow.models.baseoperator.BaseOperator.defer") - @mock.patch("airflow.sensors.time_delta.sleep") + @mock.patch("airflow.providers.standard.time.sensors.time_delta.sleep") def test_wait_sensor(self, sleep_mock, defer_mock, should_defer): wait_time = timedelta(seconds=30) op = WaitSensor( diff --git a/tests/sensors/test_weekday_sensor.py b/tests/providers/standard/time/sensors/test_weekday.py similarity index 98% rename from tests/sensors/test_weekday_sensor.py rename to tests/providers/standard/time/sensors/test_weekday.py index 99c4e97c64805..900c3d869261b 100644 --- a/tests/sensors/test_weekday_sensor.py +++ b/tests/providers/standard/time/sensors/test_weekday.py @@ -24,7 +24,7 @@ 
from airflow.exceptions import AirflowSensorTimeout from airflow.models import DagBag from airflow.models.dag import DAG -from airflow.sensors.weekday import DayOfWeekSensor +from airflow.providers.standard.time.sensors.weekday import DayOfWeekSensor from airflow.utils.timezone import datetime from airflow.utils.weekday import WeekDay from tests.test_utils import db diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index cff85c558fc3c..f5e3843a11c31 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -38,11 +38,11 @@ from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.time.sensors.time import TimeSensor from airflow.sensors.external_task import ( ExternalTaskMarker, ExternalTaskSensor, ) -from airflow.sensors.time_sensor import TimeSensor from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.triggers.external_task import WorkflowTrigger from airflow.utils.hashlib_wrapper import md5