Skip to content

Commit 63061de

Browse files
committed
Fix DB migrations from 2.10.5 to 3.0.0 for SQLite
FK naming is so inconsistent. SQLite doesn't really "have" FK names, so this is empirically what worked. Fixes #49296
1 parent d0d0e3c commit 63061de

3 files changed

Lines changed: 29 additions & 15 deletions

File tree

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
da000ad784f974dad63f6db08942d8e968242380f468bc43e35de5634960dcfc
1+
505e2e6c0f9c6988297f939ea005dde1b458e73b915fe33bc90e745f4498c7ef

airflow-core/src/airflow/migrations/versions/0041_3_0_0_rename_dataset_as_asset.py

Lines changed: 28 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -118,6 +118,8 @@ def _rename_pk_constraint(
118118
def upgrade():
119119
"""Rename dataset as asset."""
120120
dialect = op.get_bind().dialect.name
121+
is_sqlite = dialect == "sqlite"
122+
121123
# Rename tables
122124
for original_name, new_name in table_name_mappings:
123125
op.rename_table(original_name, new_name)
@@ -155,7 +157,10 @@ def upgrade():
155157
)
156158

157159
with op.batch_alter_table("asset_alias_asset", schema=None) as batch_op:
158-
batch_op.drop_constraint(op.f("dataset_alias_dataset_alias_id_fkey"), type_="foreignkey")
160+
batch_op.drop_constraint(
161+
"ds_dsa_alias_id" if is_sqlite else op.f("dataset_alias_dataset_dataset_id_fkey"),
162+
type_="foreignkey",
163+
)
159164
_rename_index(
160165
batch_op=batch_op,
161166
original_name="idx_dataset_alias_dataset_alias_id",
@@ -172,7 +177,10 @@ def upgrade():
172177
ondelete="CASCADE",
173178
)
174179

175-
batch_op.drop_constraint(op.f("dataset_alias_dataset_dataset_id_fkey"), type_="foreignkey")
180+
batch_op.drop_constraint(
181+
"ds_dsa_dataset_id" if is_sqlite else op.f("dataset_alias_dataset_alias_id_fkey"),
182+
type_="foreignkey",
183+
)
176184
if dialect == "postgresql":
177185
op.execute("ALTER TABLE asset_alias_asset DROP CONSTRAINT IF EXISTS ds_dsa_alias_id")
178186
op.execute("ALTER TABLE asset_alias_asset DROP CONSTRAINT IF EXISTS ds_dsa_dataset_id")
@@ -196,7 +204,10 @@ def upgrade():
196204
)
197205

198206
with op.batch_alter_table("asset_alias_asset_event", schema=None) as batch_op:
199-
batch_op.drop_constraint(op.f("dataset_alias_dataset_event_alias_id_fkey"), type_="foreignkey")
207+
batch_op.drop_constraint(
208+
"dss_de_alias_id" if is_sqlite else op.f("dataset_alias_dataset_event_alias_id_fkey"),
209+
type_="foreignkey",
210+
)
200211
_rename_index(
201212
batch_op=batch_op,
202213
original_name="idx_dataset_alias_dataset_event_alias_id",
@@ -212,7 +223,10 @@ def upgrade():
212223
ondelete="CASCADE",
213224
)
214225

215-
batch_op.drop_constraint(op.f("dataset_alias_dataset_event_event_id_fkey"), type_="foreignkey")
226+
batch_op.drop_constraint(
227+
"dss_de_event_id" if is_sqlite else op.f("dataset_alias_dataset_event_event_id_fkey"),
228+
type_="foreignkey",
229+
)
216230
if dialect == "postgresql":
217231
op.execute("ALTER TABLE asset_alias_asset_event DROP CONSTRAINT IF EXISTS dss_de_alias_id")
218232
op.execute("ALTER TABLE asset_alias_asset_event DROP CONSTRAINT IF EXISTS dss_de_event_id")
@@ -469,6 +483,8 @@ def upgrade():
469483

470484
def downgrade():
471485
"""Unapply Rename dataset as asset."""
486+
dialect = op.get_bind().dialect.name
487+
is_sqlite = dialect == "sqlite"
472488
# Rename tables
473489
for original_name, new_name in table_name_mappings:
474490
op.rename_table(new_name, original_name)
@@ -507,7 +523,7 @@ def downgrade():
507523
unique=False,
508524
)
509525
batch_op.create_foreign_key(
510-
constraint_name=op.f("dataset_alias_dataset_alias_id_fkey"),
526+
constraint_name="ds_dsa_dataset_id" if is_sqlite else op.f("dataset_alias_dataset_alias_id_fkey"),
511527
referent_table="dataset_alias",
512528
local_cols=["alias_id"],
513529
remote_cols=["id"],
@@ -523,7 +539,7 @@ def downgrade():
523539
unique=False,
524540
)
525541
batch_op.create_foreign_key(
526-
constraint_name=op.f("dataset_alias_dataset_dataset_id_fkey"),
542+
constraint_name="ds_dsa_alias_id" if is_sqlite else op.f("dataset_alias_dataset_dataset_id_fkey"),
527543
referent_table="dataset",
528544
local_cols=["dataset_id"],
529545
remote_cols=["id"],
@@ -540,7 +556,9 @@ def downgrade():
540556
unique=False,
541557
)
542558
batch_op.create_foreign_key(
543-
constraint_name=op.f("dataset_alias_dataset_event_alias_id_fkey"),
559+
constraint_name="dss_de_alias_id"
560+
if is_sqlite
561+
else op.f("dataset_alias_dataset_event_alias_id_fkey"),
544562
referent_table="dataset_alias",
545563
local_cols=["alias_id"],
546564
remote_cols=["id"],
@@ -556,7 +574,9 @@ def downgrade():
556574
unique=False,
557575
)
558576
batch_op.create_foreign_key(
559-
constraint_name=op.f("dataset_alias_dataset_event_event_id_fkey"),
577+
constraint_name="dss_de_event_id"
578+
if is_sqlite
579+
else op.f("dataset_alias_dataset_event_event_id_fkey"),
560580
referent_table="dataset_event",
561581
local_cols=["event_id"],
562582
remote_cols=["id"],

airflow-core/src/airflow/utils/db.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1219,12 +1219,6 @@ def downgrade(*, to_revision, from_revision=None, show_sql_only=False, session:
12191219
except ImportError:
12201220
log.warning("Import error occurred while importing FABDBManager. Skipping the check.")
12211221
pass
1222-
if not inspect(settings.engine).has_table("ab_user"):
1223-
log.error(
1224-
"Downgrade to revision less than 3.0.0 requires that `ab_user` table is present. "
1225-
"Please add FabDBManager to [core] external_db_managers and run fab migrations before proceeding"
1226-
)
1227-
return
12281222
with create_global_lock(session=session, lock=DBLocks.MIGRATIONS):
12291223
if show_sql_only:
12301224
log.warning("Generating sql scripts for manual migration.")

0 commit comments

Comments
 (0)