Mirror of https://github.com/quay/quay.git, synced 2025-04-18 10:44:06 +03:00
chore: Update alembic migration script to support sqlite db (PROJQUAY-7097) (#2839)
* Add sqlite db support on quay start up
* Add batchmode to migration scripts to support sqlite db
* Add sqlite db to config-tool validator + alembic migration
* Fix migration script to prevent db row locking
  Added commit statement to ensure previous transaction is completed before the next one within the same table
* Clean up unused sqlite volume
* Apply black formatting to migration scripts
* Address review comments
* Ensure py39-unit test runs the alembic migration on Sqlite
* Add static type checking for alembic config file name
* alembic remove commit and invalidate during migration
  When disconnecting from db, alembic tries to rollback causing PendingRollbackError
* Bump go version in config-tool Dockerfile
* Explicitly commit transaction to prevent db table locking
* Clean up + remove debug statements
* Undo database secret key change
* Add TEST_DATABASE_URI to py39-unit to run unit test with sqlite db
* Drop index before dropping column to prevent sqlite error
* Add test coverage + address last set of reviews

---------

Signed-off-by: harishsurf <hgovinda@redhat.com>
This commit is contained in:
parent 98c44a1858
commit 074f35ee99
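The recurring change in the migration hunks below is the move from direct op.* calls to Alembic batch mode, which SQLite needs because it cannot alter columns or drop constraints in place. As a rough sketch only (mirroring the removed_tag_expiration_s migration that appears later in this diff, and assuming the project's custom upgrade(op, tables, tester) signature): on SQLite, batch mode recreates the table behind the scenes, while MySQL/PostgreSQL still get ordinary ALTER statements.

import sqlalchemy as sa

def upgrade(op, tables, tester):
    # Batch mode: on SQLite this copies "user" into a temp table, applies the
    # change, and swaps it back in; other dialects emit a plain ALTER TABLE.
    with op.batch_alter_table("user") as batch_op:
        batch_op.alter_column("removed_tag_expiration_s", type_=sa.BigInteger)

def downgrade(op, tables, tester):
    with op.batch_alter_table("user") as batch_op:
        batch_op.alter_column("removed_tag_expiration_s", type_=sa.Integer)

The render_as_batch=True flag added to env.py below enables the same behavior for autogenerated operations.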
@@ -6,7 +6,7 @@ RUN yum install -y nodejs && \
    npm install --ignore-engines && \
    npm run build

FROM golang:1.19-alpine
FROM golang:1.22-alpine

RUN apk update && apk add ca-certificates && rm -rf /var/cache/apk/* && mkdir /usr/local/share/ca-certificates/extra

WORKDIR /go/src/config-tool

@@ -15,6 +15,6 @@ RUN rm -rf /go/src/config-tool/pkg/lib/editor/static/build
COPY --from=jsbuild /jssrc/static/build /go/src/config-tool/pkg/lib/editor/static/build

RUN go get -d -v ./...
RUN go install -v ./...
RUN go install -v ./...

ENTRYPOINT [ "config-tool" ]
@@ -62,6 +62,7 @@ require (
    github.com/mailru/easyjson v0.7.6 // indirect
    github.com/mattn/go-ieproxy v0.0.1 // indirect
    github.com/mattn/go-runewidth v0.0.9 // indirect
    github.com/mattn/go-sqlite3 v1.14.22
    github.com/minio/md5-simd v1.1.2 // indirect
    github.com/minio/sha256-simd v1.0.1 // indirect
    github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@@ -192,6 +192,8 @@ github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.66 h1:bnTOXOHjOqv/gcMuiVbN9o2ngRItvqE774dG9nq0Dzw=
@@ -26,6 +26,7 @@ import (
    "github.com/go-redis/redis/v8"
    "github.com/go-sql-driver/mysql"
    "github.com/jackc/pgx/v4"
    _ "github.com/mattn/go-sqlite3"
    log "github.com/sirupsen/logrus"
    "golang.org/x/oauth2"
)

@@ -654,6 +655,24 @@ func ValidateDatabaseConnection(opts Options, rawURI, caCert string, threadlocal
        }
        return errors.New("if you are using a Postgres database, you must install the pg_trgm extension")

    } else if scheme == "sqlite" {
        // Open a connection to the SQLite database
        db, err := sql.Open("sqlite3", dbname)
        if err != nil {
            return fmt.Errorf("error connecting to sqlite database: %s", err)
        }
        defer db.Close()

        // Try to ping database
        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()

        log.Debugf("Pinging sqlite database at %s db path:", dbname)
        err = db.PingContext(ctx)
        if err != nil {
            return err
        }

    } else {
        return errors.New("you must use a valid scheme")
    }
@@ -22,6 +22,7 @@ logger = logging.getLogger(__name__)

# Alembic's configuration
config = context.config
assert config.config_file_name is not None, "Alembic config file name must be set"

logging.config.fileConfig(config.config_file_name)

@@ -97,9 +98,6 @@ def run_migrations_online():

    In this scenario we need to create an Engine and associate a connection with the context.
    """
    if isinstance(db.obj, SqliteDatabase) and "DB_URI" not in os.environ:
        logger.info("Skipping Sqlite migration!")
        return

    engine = get_engine()
    connection = engine.connect()

@@ -107,6 +105,7 @@ def run_migrations_online():
        connection=connection,
        target_metadata=target_metadata,
        transactional_ddl=False,
        render_as_batch=True,
    )

    try:
@@ -25,5 +25,6 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("user", "creation_date")
    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("creation_date")
    # ### end Alembic commands ###
@@ -26,5 +26,6 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("user", "maximum_queued_builds_count")
    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("maximum_queued_builds_count")
    # ### end Alembic commands ###
@@ -14,8 +14,10 @@ import sqlalchemy as sa


def upgrade(op, tables, tester):
    op.alter_column("blobupload", "byte_count", existing_type=sa.BigInteger(), nullable=False)
    with op.batch_alter_table("blobupload") as batch_op:
        batch_op.alter_column("byte_count", existing_type=sa.BigInteger(), nullable=False)


def downgrade(op, tables, tester):
    op.alter_column("blobupload", "byte_count", existing_type=sa.BigInteger(), nullable=True)
    with op.batch_alter_table("blobupload") as batch_op:
        batch_op.alter_column("byte_count", existing_type=sa.BigInteger(), nullable=True)
@@ -48,8 +48,9 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("repositorybuildtrigger", "successive_internal_error_count")
    op.drop_column("repositorybuildtrigger", "successive_failure_count")
    with op.batch_alter_table("repositorybuildtrigger") as batch_op:
        batch_op.drop_column("successive_internal_error_count")
        batch_op.drop_column("successive_failure_count")
    # ### end Alembic commands ###

    op.execute(
@@ -27,5 +27,6 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("user_last_accessed", table_name="user")
    op.drop_column("user", "last_accessed")
    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("last_accessed")
    # ### end Alembic commands ###
@@ -23,7 +23,11 @@ def upgrade(op, tables, tester):
    if "uploadedblob" not in table_names:
        op.create_table(
            "uploadedblob",
            sa.Column("id", sa.BigInteger(), nullable=False),
            sa.Column(
                "id",
                sa.BigInteger().with_variant(sa.Integer, "sqlite"),
                nullable=False,
            ),
            sa.Column("repository_id", sa.Integer(), nullable=False),
            sa.Column("blob_id", sa.Integer(), nullable=False),
            sa.Column("uploaded_at", sa.DateTime(), nullable=False),
@@ -89,6 +89,10 @@ def upgrade(op, tables, tester):

    logger.info("Migrating to external_reference from existing columns")
    op.add_column("repomirrorconfig", sa.Column("external_reference", sa.Text(), nullable=True))
    # when adding the "external_reference" column, the table "repomirrorconfig" gets locked preventing running queries on it
    # immediately after. Hence explicitly committing the transaction to ensure lock gets released.
    if op.get_context().dialect.name != "sqlite":
        op.get_bind().execute("COMMIT")

    logger.info("Reencrypting existing columns")
    if app.config.get("SETUP_COMPLETE", False) and not tester.is_testing():

@@ -141,13 +145,13 @@ def upgrade(op, tables, tester):
            repo_mirror.external_reference = repo
            repo_mirror.save()

    op.drop_column("repomirrorconfig", "external_registry")
    op.drop_column("repomirrorconfig", "external_namespace")
    op.drop_column("repomirrorconfig", "external_repository")
    with op.batch_alter_table("repomirrorconfig") as batch_op:
        batch_op.drop_column("external_registry")
        batch_op.drop_column("external_namespace")
        batch_op.drop_column("external_repository")

    op.alter_column(
        "repomirrorconfig", "external_reference", nullable=False, existing_type=sa.Text()
    )
    with op.batch_alter_table("repomirrorconfig") as batch_op:
        batch_op.alter_column("external_reference", nullable=False, existing_type=sa.Text())

    tester.populate_column("repomirrorconfig", "external_reference", tester.TestDataType.String)

@@ -182,20 +186,20 @@ def downgrade(op, tables, tester):
            repo_mirror.external_repository = parts[2] if len(parts) >= 3 else "DOWNGRADE-FAILED"
            repo_mirror.save()

    op.drop_column("repomirrorconfig", "external_reference")
    with op.batch_alter_table("repomirrorconfig") as batch_op:
        batch_op.drop_column("external_reference")

    op.alter_column(
        "repomirrorconfig", "external_registry", nullable=False, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "repomirrorconfig",
        "external_namespace",
        nullable=False,
        existing_type=sa.String(length=255),
    )
    op.alter_column(
        "repomirrorconfig",
        "external_repository",
        nullable=False,
        existing_type=sa.String(length=255),
    )
    with op.batch_alter_table("repomirrorconfig") as batch_op:
        batch_op.alter_column(
            "external_registry", nullable=False, existing_type=sa.String(length=255)
        )
        batch_op.alter_column(
            "external_namespace",
            nullable=False,
            existing_type=sa.String(length=255),
        )
        batch_op.alter_column(
            "external_repository",
            nullable=False,
            existing_type=sa.String(length=255),
        )
@@ -23,23 +23,23 @@ def upgrade(op, tables, tester):
        "messages",
        sa.Column("severity", sa.String(length=255), nullable=False, server_default="info"),
    )
    op.alter_column(
        "messages",
        "uuid",
        existing_type=mysql.VARCHAR(length=36),
        server_default="",
        nullable=False,
    )
    with op.batch_alter_table("messages") as batch_op:
        batch_op.alter_column(
            "uuid",
            existing_type=mysql.VARCHAR(length=36),
            server_default="",
            nullable=False,
        )
    op.create_index("messages_media_type_id", "messages", ["media_type_id"], unique=False)
    op.create_index("messages_severity", "messages", ["severity"], unique=False)
    op.create_index("messages_uuid", "messages", ["uuid"], unique=False)
    op.create_foreign_key(
        op.f("fk_messages_media_type_id_mediatype"),
        "messages",
        "mediatype",
        ["media_type_id"],
        ["id"],
    )
    with op.batch_alter_table("messages") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_messages_media_type_id_mediatype"),
            "mediatype",
            ["media_type_id"],
            ["id"],
        )
    # ### end Alembic commands ###

    op.bulk_insert(

@@ -58,13 +58,20 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f("fk_messages_media_type_id_mediatype"), "messages", type_="foreignkey")
    with op.batch_alter_table("messages") as batch_op:
        batch_op.drop_constraint(op.f("fk_messages_media_type_id_mediatype"), type_="foreignkey")
    op.drop_index("messages_uuid", table_name="messages")
    op.drop_index("messages_severity", table_name="messages")
    op.drop_index("messages_media_type_id", table_name="messages")
    op.alter_column("messages", "uuid", existing_type=mysql.VARCHAR(length=36), nullable=True)
    op.drop_column("messages", "severity")
    op.drop_column("messages", "media_type_id")
    with op.batch_alter_table("messages") as batch_op:
        batch_op.alter_column(
            "uuid",
            existing_type=mysql.VARCHAR(length=36),
            server_default="",
            nullable=True,  # Change back to nullable if needed
        )
        batch_op.drop_column("severity")
        batch_op.drop_column("media_type_id")
    # ### end Alembic commands ###

    op.execute(
@@ -18,4 +18,5 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    op.drop_column("organizationrhskus", "quantity")
    with op.batch_alter_table("organizationrhskus") as batch_op:
        batch_op.drop_column("quantity")
@@ -15,9 +15,11 @@ import sqlalchemy as sa

def upgrade(op, tables, tester):
    # Alter the column to use BIGINT data type
    op.alter_column("user", "removed_tag_expiration_s", type_=sa.BigInteger)
    with op.batch_alter_table("user") as batch_op:
        batch_op.alter_column("removed_tag_expiration_s", type_=sa.BigInteger)


def downgrade(op, tables, tester):
    # Alter the column to use INT data type, this might fail if there are already values that are bigger than INT but we do not intend to support downgrades anyway
    op.alter_column("user", "removed_tag_expiration_s", type_=sa.Integer)
    with op.batch_alter_table("user") as batch_op:
        batch_op.alter_column("removed_tag_expiration_s", type_=sa.Integer)
@@ -15,75 +15,55 @@ import sqlalchemy as sa

def upgrade(op, tables, tester):

    # Adjust existing fields to be nullable.
    op.alter_column("accesstoken", "code", nullable=True, existing_type=sa.String(length=255))
    op.alter_column(
        "oauthaccesstoken", "access_token", nullable=True, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "oauthauthorizationcode", "code", nullable=True, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "appspecificauthtoken", "token_code", nullable=True, existing_type=sa.String(length=255)
    )
    with op.batch_alter_table("accesstoken") as batch_op:
        batch_op.alter_column("code", nullable=True, existing_type=sa.String(length=255))
    with op.batch_alter_table("oauthaccesstoken") as batch_op:
        batch_op.alter_column("access_token", nullable=True, existing_type=sa.String(length=255))
    with op.batch_alter_table("oauthauthorizationcode") as batch_op:
        batch_op.alter_column("code", nullable=True, existing_type=sa.String(length=255))
    with op.batch_alter_table("appspecificauthtoken") as batch_op:
        batch_op.alter_column("token_code", nullable=True, existing_type=sa.String(length=255))

    # Adjust new fields to be non-nullable.
    op.alter_column(
        "accesstoken", "token_name", nullable=False, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "accesstoken", "token_code", nullable=False, existing_type=sa.String(length=255)
    )
    with op.batch_alter_table("accesstoken") as batch_op:
        batch_op.alter_column("token_name", nullable=False, existing_type=sa.String(length=255))
        batch_op.alter_column("token_code", nullable=False, existing_type=sa.String(length=255))

    op.alter_column(
        "appspecificauthtoken", "token_name", nullable=False, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "appspecificauthtoken", "token_secret", nullable=False, existing_type=sa.String(length=255)
    )
    with op.batch_alter_table("appspecificauthtoken") as batch_op:
        batch_op.alter_column("token_name", nullable=False, existing_type=sa.String(length=255))
        batch_op.alter_column("token_secret", nullable=False, existing_type=sa.String(length=255))

    op.alter_column(
        "oauthaccesstoken", "token_name", nullable=False, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "oauthaccesstoken", "token_code", nullable=False, existing_type=sa.String(length=255)
    )
    with op.batch_alter_table("oauthaccesstoken") as batch_op:
        batch_op.alter_column("token_name", nullable=False, existing_type=sa.String(length=255))
        batch_op.alter_column("token_code", nullable=False, existing_type=sa.String(length=255))

    op.alter_column(
        "oauthauthorizationcode", "code_name", nullable=False, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "oauthauthorizationcode",
        "code_credential",
        nullable=False,
        existing_type=sa.String(length=255),
    )
    with op.batch_alter_table("oauthauthorizationcode") as batch_op:
        batch_op.alter_column("code_name", nullable=False, existing_type=sa.String(length=255))
        batch_op.alter_column(
            "code_credential",
            nullable=False,
            existing_type=sa.String(length=255),
        )


def downgrade(op, tables, tester):

    op.alter_column("accesstoken", "token_name", nullable=True, existing_type=sa.String(length=255))
    op.alter_column("accesstoken", "token_code", nullable=True, existing_type=sa.String(length=255))
    with op.batch_alter_table("accesstoken") as batch_op:
        batch_op.alter_column("token_name", nullable=True, existing_type=sa.String(length=255))
        batch_op.alter_column("token_code", nullable=True, existing_type=sa.String(length=255))

    op.alter_column(
        "appspecificauthtoken", "token_name", nullable=True, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "appspecificauthtoken", "token_secret", nullable=True, existing_type=sa.String(length=255)
    )
    with op.batch_alter_table("appspecificauthtoken") as batch_op:
        batch_op.alter_column("token_name", nullable=True, existing_type=sa.String(length=255))
        batch_op.alter_column("token_secret", nullable=True, existing_type=sa.String(length=255))

    op.alter_column(
        "oauthaccesstoken", "token_name", nullable=True, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "oauthaccesstoken", "token_code", nullable=True, existing_type=sa.String(length=255)
    )
    with op.batch_alter_table("oauthaccesstoken") as batch_op:
        batch_op.alter_column("token_name", nullable=True, existing_type=sa.String(length=255))
        batch_op.alter_column("token_code", nullable=True, existing_type=sa.String(length=255))

    op.alter_column(
        "oauthauthorizationcode", "code_name", nullable=True, existing_type=sa.String(length=255)
    )
    op.alter_column(
        "oauthauthorizationcode",
        "code_credential",
        nullable=True,
        existing_type=sa.String(length=255),
    )
    with op.batch_alter_table("oauthauthorizationcode") as batch_op:
        batch_op.alter_column("code_name", nullable=True, existing_type=sa.String(length=255))
        batch_op.alter_column(
            "code_credential",
            nullable=True,
            existing_type=sa.String(length=255),
        )
@@ -176,7 +176,9 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    op.drop_column("repository", "state")
    op.drop_index("repository_state", table_name="repository")
    with op.batch_alter_table("repository") as batch_op:
        batch_op.drop_column("state")

    op.drop_table("repomirrorconfig")
@@ -51,13 +51,13 @@ def upgrade(op, tables, tester):
        ["disabled_reason_id"],
        unique=False,
    )
    op.create_foreign_key(
        op.f("fk_repositorybuildtrigger_disabled_reason_id_disablereason"),
        "repositorybuildtrigger",
        "disablereason",
        ["disabled_reason_id"],
        ["id"],
    )
    with op.batch_alter_table("repositorybuildtrigger") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_repositorybuildtrigger_disabled_reason_id_disablereason"),
            "disablereason",
            ["disabled_reason_id"],
            ["id"],
        )
    # ### end Alembic commands ###

    # ### population of test data ### #

@@ -76,8 +76,9 @@ def downgrade(op, tables, tester):
        type_="foreignkey",
    )
    op.drop_index("repositorybuildtrigger_disabled_reason_id", table_name="repositorybuildtrigger")
    op.drop_column("repositorybuildtrigger", "enabled")
    op.drop_column("repositorybuildtrigger", "disabled_reason_id")
    with op.batch_alter_table("repositorybuildtrigger") as batch_op:
        batch_op.drop_column("enabled")
        batch_op.drop_column("disabled_reason_id")
    op.drop_table("disablereason")
    # ### end Alembic commands ###
@@ -16,8 +16,10 @@ from util.migrate import UTF8LongText


def upgrade(op, tables, tester):
    op.alter_column("manifest", "manifest_bytes", existing_type=sa.Text(), type_=UTF8LongText())
    with op.batch_alter_table("manifest") as batch_op:
        batch_op.alter_column("manifest_bytes", existing_type=sa.Text(), type_=UTF8LongText())


def downgrade(op, tables, tester):
    op.alter_column("manifest", "manifest_bytes", existing_type=UTF8LongText(), type_=sa.Text())
    with op.batch_alter_table("manifest") as batch_op:
        batch_op.alter_column("manifest_bytes", existing_type=UTF8LongText(), type_=sa.Text())
@@ -14,20 +14,20 @@ import sqlalchemy as sa


def upgrade(op, tables, tester):
    op.alter_column(
        table_name="logentry",
        column_name="id",
        nullable=False,
        autoincrement=True,
        type_=sa.BigInteger(),
    )
    with op.batch_alter_table("logentry") as batch_op:
        batch_op.alter_column(
            column_name="id",
            nullable=False,
            autoincrement=True,
            type_=sa.BigInteger(),
        )


def downgrade(op, tables, tester):
    op.alter_column(
        table_name="logentry",
        column_name="id",
        nullable=False,
        autoincrement=True,
        type_=sa.Integer(),
    )
    with op.batch_alter_table("logentry") as batch_op:
        batch_op.alter_column(
            column_name="id",
            nullable=False,
            autoincrement=True,
            type_=sa.Integer(),
        )
@@ -18,7 +18,11 @@ def upgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "logentry3",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column(
            "id",
            sa.Integer() if op.get_context().dialect.name == "sqlite" else sa.BigInteger(),
            nullable=False,
        ),
        sa.Column("kind_id", sa.Integer(), nullable=False),
        sa.Column("account_id", sa.Integer(), nullable=False),
        sa.Column("performer_id", sa.Integer(), nullable=True),
@@ -129,12 +129,6 @@ class OAuthApplication(BaseModel):


def upgrade(op, tables, tester):

    # NOTE: Disconnects the Alembic database connection. We do this because the Peewee calls below
    # use a *different* connection, and if we leave the alembic connection open, it'll time out.
    # See: https://github.com/sqlalchemy/alembic/issues/630
    op.get_bind().execute("COMMIT")
    op.get_bind().invalidate()

    from app import app

    if app.config.get("SETUP_COMPLETE", False) or tester.is_testing():
@@ -37,5 +37,6 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("repositorybuildtrigger_disabled_datetime", table_name="repositorybuildtrigger")
    op.drop_column("repositorybuildtrigger", "disabled_datetime")
    with op.batch_alter_table("repositorybuildtrigger") as batch_op:
        batch_op.drop_column("disabled_datetime")
    # ### end Alembic commands ###
@@ -48,6 +48,7 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("manifest_repository_id_config_media_type", table_name="manifest")
    op.drop_column("manifest", "layers_compressed_size")
    op.drop_column("manifest", "config_media_type")
    with op.batch_alter_table("manifest") as batch_op:
        batch_op.drop_column("layers_compressed_size")
        batch_op.drop_column("config_media_type")
    # ### end Alembic commands ###
@@ -15,323 +15,295 @@ import sqlalchemy as sa

def upgrade(op, tables, tester):
    # DerivedStorageForImage
    op.drop_constraint(
        op.f("fk_derivedstorageforimage_derivative_id_imagestorage"),
        "derivedstorageforimage",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_derivedstorageforimage_source_image_id_image"),
        "derivedstorageforimage",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_derivedstorageforimage_transformation_constraint"),
        "derivedstorageforimage",
        type_="foreignkey",
    )
    with op.batch_alter_table("derivedstorageforimage") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_derivedstorageforimage_derivative_id_imagestorage"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_derivedstorageforimage_source_image_id_image"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_derivedstorageforimage_transformation_constraint"),
            type_="foreignkey",
        )

    # RepositoryTag
    op.drop_constraint(
        op.f("fk_repositorytag_image_id_image"),
        "repositorytag",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_repositorytag_repository_id_repository"),
        "repositorytag",
        type_="foreignkey",
    )
    with op.batch_alter_table("repositorytag") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_repositorytag_image_id_image"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_repositorytag_repository_id_repository"),
            type_="foreignkey",
        )

    # TorrentInfo
    op.drop_constraint(
        op.f("fk_torrentinfo_storage_id_imagestorage"),
        "torrentinfo",
        type_="foreignkey",
    )
    with op.batch_alter_table("torrentinfo") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_torrentinfo_storage_id_imagestorage"),
            type_="foreignkey",
        )

    # TagManifest
    op.drop_constraint(
        op.f("fk_tagmanifest_tag_id_repositorytag"),
        "tagmanifest",
        type_="foreignkey",
    )
    with op.batch_alter_table("tagmanifest") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_tagmanifest_tag_id_repositorytag"),
            type_="foreignkey",
        )

    # TagManifestToManifest
    op.drop_constraint(
        op.f("fk_tagmanifesttomanifest_manifest_id_manifest"),
        "tagmanifesttomanifest",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifesttomanifest_tag_manifest_id_tagmanifest"),
        "tagmanifesttomanifest",
        type_="foreignkey",
    )
    with op.batch_alter_table("tagmanifesttomanifest") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_tagmanifesttomanifest_manifest_id_manifest"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifesttomanifest_tag_manifest_id_tagmanifest"),
            type_="foreignkey",
        )

    # TagManifestLabel
    op.drop_constraint(
        op.f("fk_tagmanifestlabel_annotated_id_tagmanifest"),
        "tagmanifestlabel",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifestlabel_label_id_label"),
        "tagmanifestlabel",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifestlabel_repository_id_repository"),
        "tagmanifestlabel",
        type_="foreignkey",
    )
    with op.batch_alter_table("tagmanifestlabel") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabel_annotated_id_tagmanifest"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabel_label_id_label"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabel_repository_id_repository"),
            type_="foreignkey",
        )

    # TagManifestLabelMap
    op.drop_constraint(
        op.f("fk_tagmanifestlabelmap_label_id_label"),
        "tagmanifestlabelmap",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifestlabelmap_manifest_id_manifest"),
        "tagmanifestlabelmap",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifestlabelmap_manifest_label_id_manifestlabel"),
        "tagmanifestlabelmap",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifestlabelmap_tag_manifest_id_tagmanifest"),
        "tagmanifestlabelmap",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagmanifestlabelmap_tag_manifest_label_id_tagmanifestlabel"),
        "tagmanifestlabelmap",
        type_="foreignkey",
    )
    with op.batch_alter_table("tagmanifestlabelmap") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabelmap_label_id_label"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabelmap_manifest_id_manifest"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabelmap_manifest_label_id_manifestlabel"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabelmap_tag_manifest_id_tagmanifest"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagmanifestlabelmap_tag_manifest_label_id_tagmanifestlabel"),
            type_="foreignkey",
        )

    # TagToRepositoryTag
    op.drop_constraint(
        op.f("fk_tagtorepositorytag_repository_id_repository"),
        "tagtorepositorytag",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagtorepositorytag_repository_tag_id_repositorytag"),
        "tagtorepositorytag",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_tagtorepositorytag_tag_id_tag"), "tagtorepositorytag", type_="foreignkey"
    )
    with op.batch_alter_table("tagtorepositorytag") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_tagtorepositorytag_repository_id_repository"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_tagtorepositorytag_repository_tag_id_repositorytag"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(op.f("fk_tagtorepositorytag_tag_id_tag"), type_="foreignkey")

    # Image
    op.drop_constraint(op.f("fk_image_repository_id_repository"), "image", type_="foreignkey")
    op.drop_constraint(op.f("fk_image_storage_id_imagestorage"), "image", type_="foreignkey")
    with op.batch_alter_table("image") as batch_op:
        batch_op.drop_constraint(op.f("fk_image_repository_id_repository"), type_="foreignkey")
        batch_op.drop_constraint(op.f("fk_image_storage_id_imagestorage"), type_="foreignkey")

    # ManifestLegacyImage
    op.drop_constraint(
        op.f("fk_manifestlegacyimage_image_id_image"), "manifestlegacyimage", type_="foreignkey"
    )
    op.drop_constraint(
        op.f("fk_manifestlegacyimage_manifest_id_manifest"),
        "manifestlegacyimage",
        type_="foreignkey",
    )
    op.drop_constraint(
        op.f("fk_manifestlegacyimage_repository_id_repository"),
        "manifestlegacyimage",
        type_="foreignkey",
    )
    with op.batch_alter_table("manifestlegacyimage") as batch_op:
        batch_op.drop_constraint(op.f("fk_manifestlegacyimage_image_id_image"), type_="foreignkey")
        batch_op.drop_constraint(
            op.f("fk_manifestlegacyimage_manifest_id_manifest"),
            type_="foreignkey",
        )
        batch_op.drop_constraint(
            op.f("fk_manifestlegacyimage_repository_id_repository"),
            type_="foreignkey",
        )


def downgrade(op, tables, tester):
    # DerivedStorageForImage
    op.create_foreign_key(
        op.f("fk_derivedstorageforimage_derivative_id_imagestorage"),
        "derivedstorageforimage",
        "imagestorage",
        ["derivative_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_derivedstorageforimage_source_image_id_image"),
        "derivedstorageforimage",
        "image",
        ["source_image_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_derivedstorageforimage_transformation_constraint"),
        "derivedstorageforimage",
        "imagestoragetransformation",
        ["transformation_id"],
        ["id"],
    )
    with op.batch_alter_table("derivedstorageforimage") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_derivedstorageforimage_derivative_id_imagestorage"),
            "imagestorage",
            ["derivative_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_derivedstorageforimage_source_image_id_image"),
            "image",
            ["source_image_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_derivedstorageforimage_transformation_constraint"),
            "imagestoragetransformation",
            ["transformation_id"],
            ["id"],
        )

    # RepositoryTag
    op.create_foreign_key(
        op.f("fk_repositorytag_image_id_image"),
        "repositorytag",
        "image",
        ["image_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_repositorytag_repository_id_repository"),
        "repositorytag",
        "repository",
        ["repository_id"],
        ["id"],
    )
    with op.batch_alter_table("repositorytag") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_repositorytag_image_id_image"),
            "image",
            ["image_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_repositorytag_repository_id_repository"),
            "repository",
            ["repository_id"],
            ["id"],
        )

    # TorrentInfo
    op.create_foreign_key(
        op.f("fk_torrentinfo_storage_id_imagestorage"),
        "torrentinfo",
        "imagestorage",
        ["storage_id"],
        ["id"],
    )
    with op.batch_alter_table("torrentinfo") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_torrentinfo_storage_id_imagestorage"),
            "imagestorage",
            ["storage_id"],
            ["id"],
        )

    # TagManifest
    op.create_foreign_key(
        op.f("fk_tagmanifest_tag_id_repositorytag"),
        "tagmanifest",
        "repositorytag",
        ["tag_id"],
        ["id"],
    )
    with op.batch_alter_table("tagmanifest") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_tagmanifest_tag_id_repositorytag"),
            "repositorytag",
            ["tag_id"],
            ["id"],
        )

    # TagManifestToManifest
    op.create_foreign_key(
        op.f("fk_tagmanifesttomanifest_manifest_id_manifest"),
        "tagmanifesttomanifest",
        "manifest",
        ["manifest_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifesttomanifest_tag_manifest_id_tagmanifest"),
        "tagmanifesttomanifest",
        "tagmanifest",
        ["tag_manifest_id"],
        ["id"],
    )
    with op.batch_alter_table("tagmanifesttomanifest") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_tagmanifesttomanifest_manifest_id_manifest"),
            "manifest",
            ["manifest_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifesttomanifest_tag_manifest_id_tagmanifest"),
            "tagmanifest",
            ["tag_manifest_id"],
            ["id"],
        )

    # TagManifestLabel
    op.create_foreign_key(
        op.f("fk_tagmanifestlabel_annotated_id_tagmanifest"),
        "tagmanifestlabel",
        "tagmanifest",
        ["annotated_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifestlabel_label_id_label"),
        "tagmanifestlabel",
        "label",
        ["label_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifestlabel_repository_id_repository"),
        "tagmanifestlabel",
        "repository",
        ["repository_id"],
        ["id"],
    )
    with op.batch_alter_table("tagmanifestlabel") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabel_annotated_id_tagmanifest"),
            "tagmanifest",
            ["annotated_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabel_label_id_label"),
            "label",
            ["label_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabel_repository_id_repository"),
            "repository",
            ["repository_id"],
            ["id"],
        )

    # TagManifestLabelMap
    op.create_foreign_key(
        op.f("fk_tagmanifestlabelmap_label_id_label"),
        "tagmanifestlabelmap",
        "label",
        ["label_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifestlabelmap_manifest_id_manifest"),
        "tagmanifestlabelmap",
        "manifest",
        ["manifest_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifestlabelmap_manifest_label_id_manifestlabel"),
        "tagmanifestlabelmap",
        "manifest_label",
        ["manifest_label_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifestlabelmap_tag_manifest_id_tagmanifest"),
        "tagmanifestlabelmap",
        "tagmanifest",
        ["tag_manifest_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagmanifestlabelmap_tag_manifest_label_id_tagmanifestlabel"),
        "tagmanifestlabelmap",
        "tagmanifestlabel",
        ["tag_manifest_label_id"],
        ["id"],
    )
    with op.batch_alter_table("tagmanifestlabelmap") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabelmap_label_id_label"),
            "label",
            ["label_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabelmap_manifest_id_manifest"),
            "manifest",
            ["manifest_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabelmap_manifest_label_id_manifestlabel"),
            "manifest_label",
            ["manifest_label_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabelmap_tag_manifest_id_tagmanifest"),
            "tagmanifest",
            ["tag_manifest_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagmanifestlabelmap_tag_manifest_label_id_tagmanifestlabel"),
            "tagmanifestlabel",
            ["tag_manifest_label_id"],
            ["id"],
        )

    # TagToRepositoryTag
    op.create_foreign_key(
        op.f("fk_tagtorepositorytag_repository_id_repository"),
        "tagtorepositorytag",
        "repository",
        ["repository_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagtorepositorytag_repository_tag_id_repositorytag"),
        "tagtorepositorytag",
        "repositorytag",
        ["repository_tag_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_tagtorepositorytag_tag_id_tag"),
        "tagtorepositorytag",
        "tag",
        ["tag_id"],
        ["id"],
    )
    with op.batch_alter_table("tagtorepositorytag") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_tagtorepositorytag_repository_id_repository"),
            "repository",
            ["repository_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagtorepositorytag_repository_tag_id_repositorytag"),
            "repositorytag",
            ["repository_tag_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_tagtorepositorytag_tag_id_tag"),
            "tag",
            ["tag_id"],
            ["id"],
        )

    # Image
    op.create_foreign_key(
        op.f("fk_image_repository_id_repository"), "image", "repository", ["repository_id"], ["id"]
    )
    op.create_foreign_key(
        op.f("fk_image_storage_id_imagestorage"), "image", "imagestorage", ["storage_id"], ["id"]
    )
    with op.batch_alter_table("image") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_image_repository_id_repository"), "repository", ["repository_id"], ["id"]
        )
        batch_op.create_foreign_key(
            op.f("fk_image_storage_id_imagestorage"), "imagestorage", ["storage_id"], ["id"]
        )

    # ManifestLegacyImage
    op.create_foreign_key(
        op.f("fk_manifestlegacyimage_image_id_image"),
        "manifestlegacyimage",
        "image",
        ["image_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_manifestlegacyimage_manifest_id_manifest"),
        "manifestlegacyimage",
        "manifest",
        ["manifest_id"],
        ["id"],
    )
    op.create_foreign_key(
        op.f("fk_manifestlegacyimage_repository_id_repository"),
        "manifestlegacyimage",
        "repository",
        ["repository_id"],
        ["id"],
    )
    with op.batch_alter_table("manifestlegacyimage") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_manifestlegacyimage_image_id_image"),
            "image",
            ["image_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_manifestlegacyimage_manifest_id_manifest"),
            "manifest",
            ["manifest_id"],
            ["id"],
        )
        batch_op.create_foreign_key(
            op.f("fk_manifestlegacyimage_repository_id_repository"),
            "repository",
            ["repository_id"],
            ["id"],
        )
@@ -14,32 +14,28 @@ import sqlalchemy as sa


def upgrade(op, tables, tester):
    op.alter_column(
        "proxycacheconfig",
        "upstream_registry_username",
        type_=sa.String(length=4096),
        nullable=True,
    )

    op.alter_column(
        "proxycacheconfig",
        "upstream_registry_password",
        type_=sa.String(length=4096),
        nullable=True,
    )
    with op.batch_alter_table("proxycacheconfig") as batch_op:
        batch_op.alter_column(
            "upstream_registry_username",
            type_=sa.String(length=4096),
            nullable=True,
        )
        batch_op.alter_column(
            "upstream_registry_password",
            type_=sa.String(length=4096),
            nullable=True,
        )


def downgrade(op, tables, tester):
    op.alter_column(
        "proxycacheconfig",
        "upstream_registry_username",
        type_=sa.String(length=2048),
        nullable=True,
    )

    op.alter_column(
        "proxycacheconfig",
        "upstream_registry_password",
        type_=sa.String(length=2048),
        nullable=True,
    )
    with op.batch_alter_table("proxycacheconfig") as batch_op:
        batch_op.alter_column(
            "upstream_registry_username",
            type_=sa.String(length=2048),
            nullable=True,
        )
        batch_op.alter_column(
            "upstream_registry_password",
            type_=sa.String(length=2048),
            nullable=True,
        )
@@ -35,13 +35,13 @@ def upgrade(op, tables, tester):
        "repository", sa.Column("kind_id", sa.Integer(), nullable=False, server_default="1")
    )
    op.create_index("repository_kind_id", "repository", ["kind_id"], unique=False)
    op.create_foreign_key(
        op.f("fk_repository_kind_id_repositorykind"),
        "repository",
        "repositorykind",
        ["kind_id"],
        ["id"],
    )
    with op.batch_alter_table("repository") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_repository_kind_id_repositorykind"),
            "repositorykind",
            ["kind_id"],
            ["id"],
        )

    # ### population of test data ### #
    tester.populate_column("repository", "kind_id", tester.TestDataType.Foreign("repositorykind"))

@@ -53,5 +53,6 @@ def downgrade(op, tables, tester):
        op.f("fk_repository_kind_id_repositorykind"), "repository", type_="foreignkey"
    )
    op.drop_index("repository_kind_id", table_name="repository")
    op.drop_column("repository", "kind_id")
    with op.batch_alter_table("repository") as batch_op:
        batch_op.drop_column("kind_id")
    op.drop_table("repositorykind")
@@ -15,10 +15,11 @@ from sqlalchemy.dialects import mysql


def upgrade(op, tables, tester):
    op.alter_column("blobupload", "byte_count", existing_type=sa.Integer(), type_=sa.BigInteger())
    op.alter_column(
        "blobupload", "uncompressed_byte_count", existing_type=sa.Integer(), type_=sa.BigInteger()
    )
    with op.batch_alter_table("blobupload") as batch_op:
        batch_op.alter_column("byte_count", existing_type=sa.Integer(), type_=sa.BigInteger())
        batch_op.alter_column(
            "uncompressed_byte_count", existing_type=sa.Integer(), type_=sa.BigInteger()
        )

    # ### population of test data ### #
    tester.populate_column("blobupload", "byte_count", tester.TestDataType.BigInteger)

@@ -32,7 +33,8 @@ def downgrade(op, tables, tester):
    tester.populate_column("blobupload", "uncompressed_byte_count", tester.TestDataType.Integer)
    # ### end population of test data ### #

    op.alter_column("blobupload", "byte_count", existing_type=sa.BigInteger(), type_=sa.Integer())
    op.alter_column(
        "blobupload", "uncompressed_byte_count", existing_type=sa.BigInteger(), type_=sa.Integer()
    )
    with op.batch_alter_table("blobupload") as batch_op:
        batch_op.alter_column("byte_count", existing_type=sa.BigInteger(), type_=sa.Integer())
        batch_op.alter_column(
            "uncompressed_byte_count", existing_type=sa.BigInteger(), type_=sa.Integer()
        )
@@ -23,19 +23,31 @@ logger = logging.getLogger(__name__)

def upgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("oauthaccesstoken_refresh_token", table_name="oauthaccesstoken")
    op.drop_column("oauthaccesstoken", "refresh_token")
    with op.batch_alter_table("oauthaccesstoken") as batch_op:
        batch_op.drop_column("refresh_token")

    op.drop_column("accesstoken", "code")
    op.drop_index("accesstoken_code", table_name="accesstoken")
    with op.batch_alter_table("accesstoken") as batch_op:
        batch_op.drop_column("code")

    op.drop_column("appspecificauthtoken", "token_code")
    op.drop_index("appspecificauthtoken_token_code", table_name="appspecificauthtoken")
    with op.batch_alter_table("appspecificauthtoken") as batch_op:
        batch_op.drop_column("token_code")

    op.drop_column("oauthaccesstoken", "access_token")
    op.drop_column("oauthapplication", "client_secret")
    op.drop_index("oauthaccesstoken_access_token", table_name="oauthaccesstoken")
    with op.batch_alter_table("oauthaccesstoken") as batch_op:
        batch_op.drop_column("access_token")

    op.drop_column("oauthauthorizationcode", "code")
    with op.batch_alter_table("oauthapplication") as batch_op:
        batch_op.drop_column("client_secret")

    op.drop_column("repositorybuildtrigger", "private_key")
    op.drop_column("repositorybuildtrigger", "auth_token")
    op.drop_index("oauthauthorizationcode_code", table_name="oauthauthorizationcode")
    with op.batch_alter_table("oauthauthorizationcode") as batch_op:
        batch_op.drop_column("code")

    with op.batch_alter_table("repositorybuildtrigger") as batch_op:
        batch_op.drop_column("private_key")
        batch_op.drop_column("auth_token")
    # ### end Alembic commands ###

    # Overwrite all plaintext robot credentials.
@@ -110,30 +110,37 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("repositorybuildtrigger", "secure_private_key")
    op.drop_column("repositorybuildtrigger", "secure_auth_token")
    with op.batch_alter_table("repositorybuildtrigger") as batch_op:
        batch_op.drop_column("secure_private_key")
        batch_op.drop_column("secure_auth_token")

    op.drop_index("oauthauthorizationcode_code", table_name="oauthauthorizationcode")
    op.create_index("oauthauthorizationcode_code", "oauthauthorizationcode", ["code"], unique=False)
    op.drop_index("oauthauthorizationcode_code_name", table_name="oauthauthorizationcode")
    op.drop_column("oauthauthorizationcode", "code_name")
    op.drop_column("oauthauthorizationcode", "code_credential")
    with op.batch_alter_table("oauthauthorizationcode") as batch_op:
        batch_op.drop_column("code_name")
        batch_op.drop_column("code_credential")

    op.drop_column("oauthapplication", "secure_client_secret")
    with op.batch_alter_table("oauthapplication") as batch_op:
        batch_op.drop_column("secure_client_secret")

    op.drop_index("oauthaccesstoken_token_name", table_name="oauthaccesstoken")
    op.drop_column("oauthaccesstoken", "token_name")
    op.drop_column("oauthaccesstoken", "token_code")
    with op.batch_alter_table("oauthaccesstoken") as batch_op:
        batch_op.drop_column("token_name")
        batch_op.drop_column("token_code")

    op.drop_column("emailconfirmation", "verification_code")
    with op.batch_alter_table("emailconfirmation") as batch_op:
        batch_op.drop_column("verification_code")

    op.drop_index("appspecificauthtoken_token_name", table_name="appspecificauthtoken")
    op.drop_column("appspecificauthtoken", "token_secret")
    op.drop_column("appspecificauthtoken", "token_name")
    with op.batch_alter_table("appspecificauthtoken") as batch_op:
        batch_op.drop_column("token_secret")
        batch_op.drop_column("token_name")

    op.drop_index("accesstoken_token_name", table_name="accesstoken")
    op.drop_column("accesstoken", "token_name")
    op.drop_column("accesstoken", "token_code")
    with op.batch_alter_table("accesstoken") as batch_op:
        batch_op.drop_column("token_name")
        batch_op.drop_column("token_code")

    op.drop_table("robotaccounttoken")
    # ### end Alembic commands ###
@@ -14,7 +14,8 @@ import sqlalchemy as sa


def upgrade(op, tables, tester):
    op.drop_column("imagestorage", "checksum")
    with op.batch_alter_table("imagestorage") as batch_op:
        batch_op.drop_column("checksum")


def downgrade(op, tables, tester):
@@ -25,4 +25,5 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    op.drop_column("user", "location")
    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("location")
@@ -16,20 +16,20 @@ import sqlalchemy as sa

def upgrade(op, tables, tester):
    # Note: In order to set unique=False, we cannot simply alter index. We need to drop the corresponding foreign key constraint first
    # and then drop the index and recreate it.
    op.drop_constraint(
        op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
        "repositoryautoprunepolicy",
        type_="foreignkey",
    )
    with op.batch_alter_table("repositoryautoprunepolicy") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
            type_="foreignkey",
        )
    op.drop_index("repositoryautoprunepolicy_namespace_id", table_name="repositoryautoprunepolicy")

    op.create_foreign_key(
        op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
        "repositoryautoprunepolicy",
        "user",
        ["namespace_id"],
        ["id"],
    )
    with op.batch_alter_table("repositoryautoprunepolicy") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
            "user",
            ["namespace_id"],
            ["id"],
        )
    op.create_index(
        "repositoryautoprunepolicy_namespace_id",
        "repositoryautoprunepolicy",

@@ -39,20 +39,20 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    op.drop_constraint(
        op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
        "repositoryautoprunepolicy",
        type_="foreignkey",
    )
    with op.batch_alter_table("repositoryautoprunepolicy") as batch_op:
        batch_op.drop_constraint(
            op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
            type_="foreignkey",
        )
    op.drop_index("repositoryautoprunepolicy_namespace_id", table_name="repositoryautoprunepolicy")

    op.create_foreign_key(
        op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
        "repositoryautoprunepolicy",
        "user",
        ["namespace_id"],
        ["id"],
    )
    with op.batch_alter_table("repositoryautoprunepolicy") as batch_op:
        batch_op.create_foreign_key(
            op.f("fk_repositoryautoprunepolicy_namespace_id_user"),
            "user",
            ["namespace_id"],
            ["id"],
        )
    op.create_index(
        "repositoryautoprunepolicy_namespace_id",
        "repositoryautoprunepolicy",
@@ -14,32 +14,28 @@ import sqlalchemy as sa


def upgrade(op, tables, tester):
    op.alter_column(
        "repomirrorconfig",
        "external_registry_username",
        type_=sa.String(length=4096),
        nullable=True,
    )

    op.alter_column(
        "repomirrorconfig",
        "external_registry_password",
        type_=sa.String(length=4096),
        nullable=True,
    )
    with op.batch_alter_table("repomirrorconfig") as batch_op:
        batch_op.alter_column(
            "external_registry_username",
            type_=sa.String(length=4096),
            nullable=True,
        )
        batch_op.alter_column(
            "external_registry_password",
            type_=sa.String(length=4096),
            nullable=True,
        )


def downgrade(op, tables, tester):
    op.alter_column(
        "repomirrorconfig",
        "external_registry_username",
        type_=sa.String(length=2048),
        nullable=True,
    )

    op.alter_column(
        "repomirrorconfig",
        "external_registry_password",
        type_=sa.String(length=2048),
        nullable=True,
    )
    with op.batch_alter_table("repomirrorconfig") as batch_op:
        batch_op.alter_column(
            "external_registry_username",
            type_=sa.String(length=2048),
            nullable=True,
        )
        batch_op.alter_column(
            "external_registry_password",
            type_=sa.String(length=2048),
            nullable=True,
        )
@@ -33,7 +33,8 @@ def upgrade(op, tables, tester):


def downgrade(op, tables, tester):
    op.drop_column("repositorynotification", "number_of_failures")
    with op.batch_alter_table("repositorynotification") as batch_op:
        batch_op.drop_column("number_of_failures")
    op.execute(
        tables.logentrykind.delete().where(
            tables.logentrykind.c.name == op.inline_literal("reset_repo_notification")
@@ -17,7 +17,8 @@ from sqlalchemy.dialects import mysql

def upgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("manifestblob_manifest_id_blob_index", table_name="manifestblob")
    op.drop_column("manifestblob", "blob_index")
    with op.batch_alter_table("manifestblob") as batch_op:
        batch_op.drop_column("blob_index")
    # ### end Alembic commands ###
@@ -36,7 +36,8 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("repository", "trust_enabled")
    with op.batch_alter_table("repository") as batch_op:
        batch_op.drop_column("trust_enabled")
    ### end Alembic commands ###

    op.execute(
@@ -39,9 +39,10 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("user", "given_name")
    op.drop_column("user", "family_name")
    op.drop_column("user", "company")
    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("given_name")
        batch_op.drop_column("family_name")
        batch_op.drop_column("company")
    ### end Alembic commands ###

    op.execute(
@@ -30,5 +30,6 @@ def upgrade(op, tables, tester):

def downgrade(op, tables, tester):
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("queueitem_state_id", table_name="queueitem")
    op.drop_column("queueitem", "state_id")
    with op.batch_alter_table("queueitem") as batch_op:
        batch_op.drop_column("state_id")
    # ### end Alembic commands ###
@@ -6,8 +6,8 @@ BITTORRENT_FILENAME_PEPPER: 0ee18f90-5b6d-42d2-ab5e-ec9fcd846272
BUILDLOGS_REDIS:
  host: quay-redis
  port: 6379
DATABASE_SECRET_KEY: '30060361640793187613697366923211113205676925445650250274752125083971638376224'
DB_URI: postgresql://quay:quay@quay-db/quay
DATABASE_SECRET_KEY: '30060361640793187613697366923211113205676925445650250274752125083971638376224'
DEFAULT_TAG_EXPIRATION: 2w
DISTRIBUTED_STORAGE_CONFIG:
  default:
@@ -1,4 +1,4 @@
alembic==1.3.3
alembic==1.13.1
aniso8601 @ git+https://github.com/DevTable/aniso8601-fake.git@bd7762c7dea0498706d3f57db60cd8a8af44ba90
APScheduler==3.10.4
attrs==19.3.0
Binary file not shown.
tox.ini
@@ -37,6 +37,19 @@ environment =
    MYSQL_ALLOW_EMPTY_PASSWORD=1
    MYSQL_USER=quay

[testenv:py39-unit]
setenv =
    PYTHONDONTWRITEBYTECODE = 1
    PYTHONPATH={toxinidir}{:}{toxinidir}
    TEST=true
    SKIP_DB_SCHEMA=true
    MARKERS="not e2e"
    TEST_DATABASE_URI=sqlite:///test/data/sqlite_test.db
commands =
    python --version
    alembic upgrade head
    pytest --timeout=3600 -m {env:MARKERS} --exitfirst --ignore=buildman/test/test_buildman.py -vv {env:FILE:} {posargs}

[testenv:py39-mysql]
setenv =
    PYTHONDONTWRITEBYTECODE = 1
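For reference, TEST_DATABASE_URI above uses SQLAlchemy's sqlite URL form, where the path after sqlite:/// is relative to the working directory and the database file is created on first connect. A minimal sketch (an illustration only, assuming SQLAlchemy is installed and the test/data directory exists):

from sqlalchemy import create_engine, text

# Same URI shape as TEST_DATABASE_URI in the py39-unit env above.
engine = create_engine("sqlite:///test/data/sqlite_test.db")
with engine.connect() as conn:
    # Simple connectivity check; prints the embedded SQLite version.
    print(conn.execute(text("select sqlite_version()")).scalar())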
@@ -11,7 +11,10 @@ def copy_table_contents(source_table, destination_table, conn):
    conn.execute(
        "INSERT INTO `%s` SELECT * FROM `%s` WHERE 1" % (destination_table, source_table)
    )
    result = list(conn.execute("Select Max(id) from `%s` WHERE 1" % destination_table))[0]
    if result[0] is not None:
        new_start_id = result[0] + 1
        conn.execute("ALTER TABLE `%s` AUTO_INCREMENT = %s" % (destination_table, new_start_id))
    if conn.engine.name != "sqlite":
        result = list(conn.execute("Select Max(id) from `%s` WHERE 1" % destination_table))[0]
        if result[0] is not None:
            new_start_id = result[0] + 1
            conn.execute(
                "ALTER TABLE `%s` AUTO_INCREMENT = %s" % (destination_table, new_start_id)
            )
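The dialect guard above skips the AUTO_INCREMENT reset on SQLite, which has no such ALTER clause; an INTEGER PRIMARY KEY already continues from max(rowid)+1 after rows are copied in. A small standalone illustration of that behavior (not part of the change itself):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE dst (id INTEGER PRIMARY KEY, v TEXT)")
conn.execute("INSERT INTO dst (id, v) VALUES (41, 'copied')")  # row copied with an explicit id
conn.execute("INSERT INTO dst (v) VALUES ('new')")             # next id is picked automatically
print(conn.execute("SELECT max(id) FROM dst").fetchone())      # -> (42,)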