Skip to content

Commit 77e9f92

Browse files
committed
Test standard provider with Airflow 2.8 and 2.9
The standard provider has now min version of Airflow = 2.8 since #43553, but we have not tested it for Airflow 2.8 and 2.9.
1 parent 07e6ada commit 77e9f92

20 files changed

Lines changed: 225 additions & 165 deletions

File tree

contributing-docs/testing/unit_tests.rst

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1173,11 +1173,11 @@ are not part of the public API. We deal with it in one of the following ways:
11731173
11741174
.. code-block:: python
11751175
1176-
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
1176+
from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
11771177
11781178
1179-
@pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="The tests should be skipped for Airflow < 2.8")
1180-
def some_test_that_only_works_for_airflow_2_8_plus():
1179+
@pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="The tests should be skipped for Airflow < 2.9")
1180+
def some_test_that_only_works_for_airflow_2_9_plus():
11811181
pass
11821182
11831183
4) Sometimes, the tests should only be run when airflow is installed from the sources in main.

dev/breeze/src/airflow_breeze/global_constants.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -574,13 +574,13 @@ def get_airflow_extras():
574574
{
575575
"python-version": "3.9",
576576
"airflow-version": "2.8.4",
577-
"remove-providers": "cloudant fab edge standard",
577+
"remove-providers": "cloudant fab edge",
578578
"run-tests": "true",
579579
},
580580
{
581581
"python-version": "3.9",
582582
"airflow-version": "2.9.3",
583-
"remove-providers": "cloudant edge standard",
583+
"remove-providers": "cloudant edge",
584584
"run-tests": "true",
585585
},
586586
{

providers/src/airflow/providers/standard/operators/python.py

Lines changed: 41 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -54,15 +54,16 @@
5454
from airflow.settings import _ENABLE_AIP_44
5555
from airflow.typing_compat import Literal
5656
from airflow.utils import hashlib_wrapper
57-
from airflow.utils.context import context_copy_partial, context_get_outlet_events, context_merge
57+
from airflow.utils.context import context_copy_partial, context_merge
5858
from airflow.utils.file import get_unique_dag_module_name
59-
from airflow.utils.operator_helpers import ExecutionCallableRunner, KeywordParameters
60-
from airflow.utils.process_utils import execute_in_subprocess
59+
from airflow.utils.operator_helpers import KeywordParameters
60+
from airflow.utils.process_utils import execute_in_subprocess, execute_in_subprocess_with_kwargs
6161
from airflow.utils.session import create_session
6262

6363
log = logging.getLogger(__name__)
6464

6565
AIRFLOW_VERSION = Version(airflow_version)
66+
AIRFLOW_V_2_10_PLUS = Version(AIRFLOW_VERSION.base_version) >= Version("2.10.0")
6667
AIRFLOW_V_3_0_PLUS = Version(AIRFLOW_VERSION.base_version) >= Version("3.0.0")
6768

6869
if TYPE_CHECKING:
@@ -187,7 +188,15 @@ def __init__(
187188
def execute(self, context: Context) -> Any:
188189
context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
189190
self.op_kwargs = self.determine_kwargs(context)
190-
self._asset_events = context_get_outlet_events(context)
191+
192+
if AIRFLOW_V_3_0_PLUS:
193+
from airflow.utils.context import context_get_outlet_events
194+
195+
self._asset_events = context_get_outlet_events(context)
196+
elif AIRFLOW_V_2_10_PLUS:
197+
from airflow.utils.context import context_get_outlet_events
198+
199+
self._dataset_events = context_get_outlet_events(context)
191200

192201
return_value = self.execute_callable()
193202
if self.show_return_value_in_logs:
@@ -206,7 +215,15 @@ def execute_callable(self) -> Any:
206215
207216
:return: the return value of the call.
208217
"""
209-
runner = ExecutionCallableRunner(self.python_callable, self._asset_events, logger=self.log)
218+
try:
219+
from airflow.utils.operator_helpers import ExecutionCallableRunner
220+
221+
asset_events = self._asset_events if AIRFLOW_V_3_0_PLUS else self._dataset_events
222+
223+
runner = ExecutionCallableRunner(self.python_callable, asset_events, logger=self.log)
224+
except ImportError:
225+
# Handle pre-Airflow 2.10 case where ExecutionCallableRunner was not available
226+
return self.python_callable(*self.op_args, **self.op_kwargs)
210227
return runner.run(*self.op_args, **self.op_kwargs)
211228

212229

@@ -551,18 +568,25 @@ def _execute_python_callable_in_subprocess(self, python_path: Path):
551568
env_vars.update(self.env_vars)
552569

553570
try:
554-
execute_in_subprocess(
555-
cmd=[
556-
os.fspath(python_path),
557-
os.fspath(script_path),
558-
os.fspath(input_path),
559-
os.fspath(output_path),
560-
os.fspath(string_args_path),
561-
os.fspath(termination_log_path),
562-
os.fspath(airflow_context_path),
563-
],
564-
env=env_vars,
565-
)
571+
cmd: list[str] = [
572+
os.fspath(python_path),
573+
os.fspath(script_path),
574+
os.fspath(input_path),
575+
os.fspath(output_path),
576+
os.fspath(string_args_path),
577+
os.fspath(termination_log_path),
578+
os.fspath(airflow_context_path),
579+
]
580+
if AIRFLOW_V_2_10_PLUS:
581+
execute_in_subprocess(
582+
cmd=cmd,
583+
env=env_vars,
584+
)
585+
else:
586+
execute_in_subprocess_with_kwargs(
587+
cmd=cmd,
588+
env=env_vars,
589+
)
566590
except subprocess.CalledProcessError as e:
567591
if e.returncode in self.skip_on_exit_code:
568592
raise AirflowSkipException(f"Process exited with code {e.returncode}. Skipping.")

providers/src/airflow/providers/standard/sensors/date_time.py

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,27 @@
1818
from __future__ import annotations
1919

2020
import datetime
21+
from dataclasses import dataclass
2122
from typing import TYPE_CHECKING, Any, NoReturn, Sequence
2223

24+
from airflow.providers.standard.operators.python import AIRFLOW_V_3_0_PLUS
2325
from airflow.sensors.base import BaseSensorOperator
24-
from airflow.triggers.base import StartTriggerArgs
26+
27+
try:
28+
from airflow.triggers.base import StartTriggerArgs
29+
except ImportError:
30+
# TODO: Remove this when min airflow version is 2.10.0 for standard provider
31+
@dataclass
32+
class StartTriggerArgs: # type: ignore[no-redef]
33+
"""Arguments required for start task execution from triggerer."""
34+
35+
trigger_cls: str
36+
next_method: str
37+
trigger_kwargs: dict[str, Any] | None = None
38+
next_kwargs: dict[str, Any] | None = None
39+
timeout: datetime.timedelta | None = None
40+
41+
2542
from airflow.triggers.temporal import DateTimeTrigger
2643
from airflow.utils import timezone
2744

@@ -125,7 +142,9 @@ def execute(self, context: Context) -> NoReturn:
125142
trigger=DateTimeTrigger(
126143
moment=timezone.parse(self.target_time),
127144
end_from_trigger=self.end_from_trigger,
128-
),
145+
)
146+
if AIRFLOW_V_3_0_PLUS
147+
else DateTimeTrigger(moment=timezone.parse(self.target_time)),
129148
)
130149

131150
def execute_complete(self, context: Context, event: Any = None) -> None:

providers/src/airflow/providers/standard/sensors/time.py

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,27 @@
1818
from __future__ import annotations
1919

2020
import datetime
21+
from dataclasses import dataclass
2122
from typing import TYPE_CHECKING, Any, NoReturn
2223

24+
from airflow.providers.standard.operators.python import AIRFLOW_V_3_0_PLUS
2325
from airflow.sensors.base import BaseSensorOperator
24-
from airflow.triggers.base import StartTriggerArgs
26+
27+
try:
28+
from airflow.triggers.base import StartTriggerArgs
29+
except ImportError:
30+
# TODO: Remove this when min airflow version is 2.10.0 for standard provider
31+
@dataclass
32+
class StartTriggerArgs: # type: ignore[no-redef]
33+
"""Arguments required for start task execution from triggerer."""
34+
35+
trigger_cls: str
36+
next_method: str
37+
trigger_kwargs: dict[str, Any] | None = None
38+
next_kwargs: dict[str, Any] | None = None
39+
timeout: datetime.timedelta | None = None
40+
41+
2542
from airflow.triggers.temporal import DateTimeTrigger
2643
from airflow.utils import timezone
2744

@@ -102,7 +119,9 @@ def __init__(
102119

103120
def execute(self, context: Context) -> NoReturn:
104121
self.defer(
105-
trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger),
122+
trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger)
123+
if AIRFLOW_V_3_0_PLUS
124+
else DateTimeTrigger(moment=self.target_datetime),
106125
method_name="execute_complete",
107126
)
108127

providers/src/airflow/providers/standard/sensors/time_delta.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323

2424
from airflow.configuration import conf
2525
from airflow.exceptions import AirflowSkipException
26+
from airflow.providers.standard.operators.python import AIRFLOW_V_3_0_PLUS
2627
from airflow.sensors.base import BaseSensorOperator
2728
from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
2829
from airflow.utils import timezone
@@ -81,7 +82,10 @@ def execute(self, context: Context) -> bool | NoReturn:
8182
# If the target datetime is in the past, return immediately
8283
return True
8384
try:
84-
trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
85+
if AIRFLOW_V_3_0_PLUS:
86+
trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
87+
else:
88+
trigger = DateTimeTrigger(moment=target_dttm)
8589
except (TypeError, ValueError) as e:
8690
if self.soft_fail:
8791
raise AirflowSkipException("Skipping due to soft_fail is set to True.") from e
@@ -121,7 +125,9 @@ def __init__(
121125
def execute(self, context: Context) -> None:
122126
if self.deferrable:
123127
self.defer(
124-
trigger=TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True),
128+
trigger=TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True)
129+
if AIRFLOW_V_3_0_PLUS
130+
else TimeDeltaTrigger(self.time_to_wait),
125131
method_name="execute_complete",
126132
)
127133
else:

providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,15 +24,11 @@
2424
from airflow.cli import cli_parser
2525
from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import init_avp, update_schema
2626

27-
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
2827
from tests_common.test_utils.config import conf_vars
2928

3029
mock_boto3 = Mock()
3130

32-
pytestmark = [
33-
pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Test requires Airflow 2.8+"),
34-
pytest.mark.skip_if_database_isolation_mode,
35-
]
31+
pytestmark = pytest.mark.skip_if_database_isolation_mode
3632

3733

3834
@pytest.mark.db_test

providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py

Lines changed: 10 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,15 @@
2323
from flask import Flask, session
2424
from flask_appbuilder.menu import MenuItem
2525

26+
from airflow.auth.managers.models.resource_details import (
27+
AccessView,
28+
ConfigurationDetails,
29+
ConnectionDetails,
30+
DagAccessEntity,
31+
DagDetails,
32+
PoolDetails,
33+
VariableDetails,
34+
)
2635
from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
2736
from airflow.providers.amazon.aws.auth_manager.avp.facade import AwsAuthManagerAmazonVerifiedPermissionsFacade
2837
from airflow.providers.amazon.aws.auth_manager.aws_auth_manager import AwsAuthManager
@@ -39,30 +48,10 @@
3948
from airflow.www import app as application
4049
from airflow.www.extensions.init_appbuilder import init_appbuilder
4150

42-
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
51+
from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
4352
from tests_common.test_utils.config import conf_vars
4453
from tests_common.test_utils.www import check_content_in_response
4554

46-
try:
47-
from airflow.auth.managers.models.resource_details import (
48-
AccessView,
49-
ConfigurationDetails,
50-
ConnectionDetails,
51-
DagAccessEntity,
52-
DagDetails,
53-
PoolDetails,
54-
VariableDetails,
55-
)
56-
except ImportError:
57-
if not AIRFLOW_V_2_8_PLUS:
58-
pytest.skip(
59-
"Skipping tests that require airflow.auth.managers.models.resource_details for Airflow < 2.8.0",
60-
allow_module_level=True,
61-
)
62-
else:
63-
raise
64-
65-
6655
if TYPE_CHECKING:
6756
from airflow.auth.managers.base_auth_manager import ResourceMethod
6857
from airflow.auth.managers.models.resource_details import AssetDetails

providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,6 @@
3434
from airflow.utils import db, timezone
3535
from airflow.utils.types import DagRunType
3636

37-
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
38-
3937

4038
@patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook")
4139
def test_spark_kubernetes_operator(mock_kubernetes_hook, data_file):
@@ -780,9 +778,6 @@ def test_resolve_application_file_template_non_dictionary(dag_maker, tmp_path, b
780778
@pytest.mark.parametrize(
781779
"use_literal_value", [pytest.param(True, id="literal-value"), pytest.param(False, id="whitespace-compat")]
782780
)
783-
@pytest.mark.skipif(
784-
not AIRFLOW_V_2_8_PLUS, reason="Skipping tests that require LiteralValue for Airflow < 2.8.0"
785-
)
786781
def test_resolve_application_file_real_file(
787782
create_task_instance_of_operator, tmp_path, use_literal_value, session
788783
):
@@ -815,9 +810,6 @@ def test_resolve_application_file_real_file(
815810

816811

817812
@pytest.mark.db_test
818-
@pytest.mark.skipif(
819-
not AIRFLOW_V_2_8_PLUS, reason="Skipping tests that require LiteralValue for Airflow < 2.8.0"
820-
)
821813
def test_resolve_application_file_real_file_not_exists(create_task_instance_of_operator, tmp_path, session):
822814
application_file = (tmp_path / "test-application-file.yml").resolve().as_posix()
823815
from airflow.template.templater import LiteralValue

providers/tests/common/sql/hooks/test_dbapi.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -30,12 +30,6 @@
3030
from airflow.models import Connection
3131
from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, fetch_one_handler
3232

33-
from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
34-
35-
pytestmark = [
36-
pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"),
37-
]
38-
3933

4034
class DbApiHookInProvider(DbApiHook):
4135
conn_name_attr = "test_conn_id"

0 commit comments

Comments
 (0)