|
19 | 19 |
|
20 | 20 | from typing import Annotated, Literal, cast |
21 | 21 |
|
| 22 | +import pendulum |
22 | 23 | from fastapi import Depends, HTTPException, Query, Request, status |
23 | 24 | from sqlalchemy import select |
24 | 25 | from sqlalchemy.orm import Session |
|
50 | 51 | DAGRunPatchStates, |
51 | 52 | DAGRunResponse, |
52 | 53 | DAGRunsBatchBody, |
| 54 | + TriggerDAGRunPostBody, |
53 | 55 | ) |
54 | 56 | from airflow.api_fastapi.core_api.datamodels.task_instances import ( |
55 | 57 | TaskInstanceCollectionResponse, |
56 | 58 | TaskInstanceResponse, |
57 | 59 | ) |
58 | 60 | from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc |
59 | | -from airflow.models import DAG, DagRun |
| 61 | +from airflow.exceptions import ParamValidationError |
| 62 | +from airflow.models import DAG, DagModel, DagRun |
| 63 | +from airflow.models.dag_version import DagVersion |
| 64 | +from airflow.timetables.base import DataInterval |
| 65 | +from airflow.utils.state import DagRunState |
| 66 | +from airflow.utils.types import DagRunTriggeredByType, DagRunType |
60 | 67 |
|
# Router for all DAG-run endpoints; routes are mounted under /dags/{dag_id}/dagRuns.
dag_run_router = AirflowRouter(tags=["DagRun"], prefix="/dags/{dag_id}/dagRuns")
62 | 69 |
|
@@ -303,6 +310,67 @@ def get_dag_runs( |
303 | 310 | ) |
304 | 311 |
|
305 | 312 |
|
@dag_run_router.post(
    "",
    responses=create_openapi_http_exception_doc(
        [
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_404_NOT_FOUND,
            status.HTTP_409_CONFLICT,
        ]
    ),
)
def trigger_dag_run(
    dag_id: str,
    body: TriggerDAGRunPostBody,
    request: Request,
    session: Annotated[Session, Depends(get_session)],
) -> DAGRunResponse:
    """Trigger a DAG run.

    Validates that the DAG is registered, active, and free of import errors,
    then creates a new manual, externally-triggered run in the QUEUED state.

    :param dag_id: ID of the DAG to trigger.
    :param body: Request payload (run id, logical date, data interval, conf, note).
    :param request: FastAPI request; used to reach the application's dag bag.
    :param session: Database session injected by FastAPI.
    :raises HTTPException: 404 if the DAG is unknown or inactive, 400 for
        import errors or invalid run parameters.
    """
    # Only active (non-deleted) DAGs may be triggered.
    dm = session.scalar(select(DagModel).where(DagModel.is_active, DagModel.dag_id == dag_id).limit(1))
    if not dm:
        raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with dag_id: '{dag_id}' not found")

    if dm.has_import_errors:
        raise HTTPException(
            status.HTTP_400_BAD_REQUEST,
            f"DAG with dag_id: '{dag_id}' has import errors and cannot be triggered",
        )

    run_id = body.dag_run_id
    logical_date = pendulum.instance(body.logical_date)

    try:
        dag: DAG = request.app.state.dag_bag.get_dag(dag_id)
        if not dag:
            # The DagModel row exists but the DAG could not be loaded from the
            # dag bag (e.g. stale metadata) — report 404 instead of crashing
            # with an AttributeError below.
            raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with dag_id: '{dag_id}' not found")

        if body.data_interval_start and body.data_interval_end:
            # Caller supplied an explicit data interval; use it verbatim.
            data_interval = DataInterval(
                start=pendulum.instance(body.data_interval_start),
                end=pendulum.instance(body.data_interval_end),
            )
        else:
            # Otherwise let the DAG's timetable infer the interval for a manual run.
            data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date)

        dag_version = DagVersion.get_latest_version(dag.dag_id)
        dag_run = dag.create_dagrun(
            run_type=DagRunType.MANUAL,
            run_id=run_id,
            logical_date=logical_date,
            data_interval=data_interval,
            state=DagRunState.QUEUED,
            conf=body.conf,
            external_trigger=True,
            dag_version=dag_version,
            session=session,
            triggered_by=DagRunTriggeredByType.REST_API,
        )
        if body.note:
            # User id not yet available here; refer to
            # https://github.com/apache/airflow/issues/43534
            current_user_id = None
            dag_run.note = (body.note, current_user_id)
        return dag_run
    except (ValueError, ParamValidationError) as e:
        # Both indicate an invalid run configuration (bad dates or conf params);
        # map them to a single 400 response.
        raise HTTPException(status.HTTP_400_BAD_REQUEST, str(e))
| 373 | + |
306 | 374 | @dag_run_router.post("/list", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND])) |
307 | 375 | def get_list_dag_runs_batch( |
308 | 376 | dag_id: Literal["~"], body: DAGRunsBatchBody, session: Annotated[Session, Depends(get_session)] |
|
0 commit comments