chore: migrate to more db migration utils (#33155)

This commit is contained in:
Elizabeth Thompson
2025-04-22 11:26:54 -07:00
committed by GitHub
parent ae48dba3e1
commit b6628cdfd2
6 changed files with 48 additions and 45 deletions

View File

@@ -22,19 +22,20 @@ Create Date: 2022-04-05 13:27:06.028908
"""
from alembic import op
from superset.migrations.shared.utils import create_index, drop_index
# revision identifiers, used by Alembic.
revision = "cdcf3d64daf4"
down_revision = "7fb8bca906d2"
from alembic import op # noqa: E402
def upgrade():
    """Add the composite (user_id, dttm) index on ``logs``.

    Uses the shared ``create_index`` helper (table first, then index name)
    instead of calling ``op.create_index`` directly.
    """
    create_index(
        "logs", op.f("ix_logs_user_id_dttm"), ["user_id", "dttm"], unique=False
    )
def downgrade():
    """Drop the (user_id, dttm) index from ``logs``, reversing ``upgrade``."""
    drop_index(index_name=op.f("ix_logs_user_id_dttm"), table_name="logs")

View File

@@ -22,21 +22,20 @@ Create Date: 2023-06-21 14:02:08.200955
"""
import sqlalchemy as sa
from superset.migrations.shared.utils import add_columns, drop_columns
# revision identifiers, used by Alembic.
revision = "90139bf715e4"
down_revision = "83e1abbe777f"
import sqlalchemy as sa # noqa: E402
from alembic import op # noqa: E402
def upgrade():
    """Add a nullable ``currency`` (String(128)) column to both metric tables.

    Uses the shared ``add_columns`` helper rather than raw ``op.add_column``.
    """
    add_columns("metrics", sa.Column("currency", sa.String(128), nullable=True))
    add_columns("sql_metrics", sa.Column("currency", sa.String(128), nullable=True))
def downgrade():
    """Remove the ``currency`` column from both metric tables.

    The shared ``drop_columns`` helper replaces the previous explicit
    ``op.batch_alter_table`` blocks; tables are handled in reverse of the
    order used by ``upgrade``.
    """
    drop_columns("sql_metrics", "currency")
    drop_columns("metrics", "currency")

View File

@@ -26,37 +26,41 @@ Create Date: 2024-01-05 16:20:31.598995
revision = "65a167d4c62e"
down_revision = "06dd9ff00fe8"
from alembic import op # noqa: E402
from superset.migrations.shared.utils import create_index, drop_index # noqa: E402
def upgrade():
    """Create the report-schedule lookup indexes.

    All three indexes are non-unique; the shared ``create_index`` helper
    takes the table name first, then the index name.
    """
    create_index(
        "report_execution_log",
        "ix_report_execution_log_report_schedule_id",
        ["report_schedule_id"],
        unique=False,
    )
    create_index(
        "report_execution_log",
        "ix_report_execution_log_start_dttm",
        ["start_dttm"],
        unique=False,
    )
    create_index(
        "report_recipient",
        "ix_report_recipient_report_schedule_id",
        ["report_schedule_id"],
        unique=False,
    )
def downgrade():
    """Drop the report-schedule lookup indexes, in reverse creation order."""
    drop_index(
        index_name="ix_report_recipient_report_schedule_id",
        table_name="report_recipient",
    )
    drop_index(
        index_name="ix_report_execution_log_start_dttm",
        table_name="report_execution_log",
    )
    drop_index(
        index_name="ix_report_execution_log_report_schedule_id",
        table_name="report_execution_log",
    )

View File

@@ -27,6 +27,7 @@ from alembic import op
from sqlalchemy_utils import EncryptedType
from superset.migrations.shared.utils import (
create_index,
create_table,
drop_fks_for_table,
)
@@ -77,9 +78,9 @@ def upgrade():
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"idx_user_id_database_id",
create_index(
"database_user_oauth2_tokens",
"idx_user_id_database_id",
["user_id", "database_id"],
)

View File

@@ -22,28 +22,26 @@ Create Date: 2024-05-02 13:40:23.126659
"""
from alembic import op
from superset.migrations.shared.utils import create_index, drop_index
# revision identifiers, used by Alembic.
revision = "3dfd0e78650e"
down_revision = "5f57af97bc3f"
from alembic import op # noqa: E402
from superset.migrations.shared.utils import table_has_index # noqa: E402
# Table and index name targeted by this migration; shared by upgrade/downgrade.
table = "query"
index = "ix_sql_editor_id"
def upgrade():
    """Create ``ix_sql_editor_id`` on ``query.sql_editor_id``.

    NOTE(review): the previous explicit ``table_has_index`` guard was
    dropped along with its import — presumably the shared ``create_index``
    helper performs the existence check itself; confirm against
    ``superset.migrations.shared.utils``.
    """
    create_index(
        table,
        op.f(index),
        ["sql_editor_id"],
        unique=False,
    )
def downgrade():
    """Drop ``ix_sql_editor_id`` from ``query``, reversing ``upgrade``.

    NOTE(review): the old ``table_has_index`` guard was removed with its
    import — presumably ``drop_index`` checks existence itself; confirm in
    ``superset.migrations.shared.utils``.
    """
    drop_index(index_name=op.f(index), table_name=table)