Updated to support video upload

2025-01-23 16:31:57 +02:00
parent 4b8d075bfe
commit 70a0065b98
28 changed files with 619 additions and 420 deletions

View File

@@ -1,17 +1,56 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# path to migration scripts
script_location = migrations
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# template used to generate migration files
file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date within the migration file as well as the filename.
# string value is passed to datetime.datetime.strftime(), default is to use UTC.
# output_timezone = UTC
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without a source .py file to be detected as revisions
# sourceless = false
# version location specification; this defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "os.pathsep".
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:///dashboard.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# hooks =
# format the generated revision script to be PEP8 compliant
# hooks = pep8
# python
# hook_1.type = python
# hook_1.entrypoint = myproject.please_run_pep8
# hook_1.options = {"treat-as-warning": "true"}
# exec
# hook_2.type = exec
# hook_2.entrypoint = /usr/bin/pep8
# hook_2.options =
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
keys = root,sqlalchemy,alembic
[handlers]
keys = console
@@ -26,18 +65,15 @@ qualname =
[logger_sqlalchemy]
level = WARN
handlers =
handlers = console
qualname = sqlalchemy.engine
# propagate = 0
[logger_alembic]
level = INFO
handlers =
handlers = console
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
# propagate = 0
[handler_console]
class = StreamHandler
@@ -47,4 +83,4 @@ formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
datefmt = %H:%M:%S
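
The reworked alembic.ini points Alembic directly at sqlite:///dashboard.db and drops the flask_migrate logger from the logging configuration. As a quick illustration (not part of the commit), that URL can be exercised on its own with SQLAlchemy; note that the new env.py below overrides it at runtime with the Flask app's SQLALCHEMY_DATABASE_URI.

# Illustrative sketch only: "sqlite:///dashboard.db" resolves to a SQLite file
# named dashboard.db relative to the working directory.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///dashboard.db")
with engine.connect() as conn:
    conn.execute(text("SELECT 1"))  # simple connectivity check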

View File

@@ -1,8 +1,8 @@
from __future__ import with_statement
import logging
from logging.config import fileConfig
from flask import current_app
from sqlalchemy import engine_from_config, pool
from alembic import context
# this is the Alembic Config object, which provides
@@ -14,30 +14,13 @@ config = context.config
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
def get_engine():
try:
# this works with Flask-SQLAlchemy<3 and Alchemical
return current_app.extensions['migrate'].db.get_engine()
except (TypeError, AttributeError):
# this works with Flask-SQLAlchemy>=3
return current_app.extensions['migrate'].db.engine
def get_engine_url():
try:
return get_engine().url.render_as_string(hide_password=False).replace(
'%', '%%')
except AttributeError:
return str(get_engine().url).replace('%', '%%')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db
from flask import current_app
config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
@@ -45,12 +28,6 @@ target_db = current_app.extensions['migrate'].db
# ... etc.
def get_metadata():
if hasattr(target_db, 'metadatas'):
return target_db.metadatas[None]
return target_db.metadata
def run_migrations_offline():
"""Run migrations in 'offline' mode.
@@ -65,7 +42,7 @@ def run_migrations_offline():
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=get_metadata(), literal_binds=True
url=url, target_metadata=target_metadata, literal_binds=True
)
with context.begin_transaction():
@@ -79,29 +56,14 @@ def run_migrations_online():
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
conf_args = current_app.extensions['migrate'].configure_args
if conf_args.get("process_revision_directives") is None:
conf_args["process_revision_directives"] = process_revision_directives
connectable = get_engine()
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=get_metadata(),
**conf_args
)
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
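
With this change env.py no longer goes through current_app.extensions['migrate']; it reads SQLALCHEMY_DATABASE_URI straight from the Flask config, so migrations have to run inside an application context. A minimal sketch of invoking them programmatically, assuming a create_app() factory (hypothetical name) and an alembic.ini at the project root:

# Sketch only -- "create_app" and its import path are assumptions.
from alembic import command
from alembic.config import Config
from app import create_app  # hypothetical application factory

app = create_app()
with app.app_context():
    command.upgrade(Config("alembic.ini"), "head")  # same as `alembic upgrade head`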

View File

@@ -1,34 +0,0 @@
"""Initial migration
Revision ID: 0de18b4ddaa3
Revises:
Create Date: 2025-01-20 14:50:44.116314
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0de18b4ddaa3'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('player', schema=None) as batch_op:
batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
batch_op.create_foreign_key('fk_user_id', 'user', ['user_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('player', schema=None) as batch_op:
batch_op.drop_constraint('fk_user_id', type_='foreignkey')
batch_op.drop_column('user_id')
# ### end Alembic commands ###

View File

@@ -0,0 +1,28 @@
"""Add theme column to user table
Revision ID: 173774695298
Revises: c07c6e720021
Create Date: 2025-01-23 15:16:46.761912
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '173774695298'
down_revision = 'c07c6e720021'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
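
Both upgrade() and downgrade() are empty here, presumably because the theme column is already created by the initial migration further down. For reference only, a revision that actually added the column might look like the sketch below (the 'light' server default is an assumption):

# Hypothetical version of this revision; the committed one is a no-op.
from alembic import op
import sqlalchemy as sa

def upgrade():
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.add_column(sa.Column('theme', sa.String(length=10),
                                      nullable=False, server_default='light'))

def downgrade():
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.drop_column('theme')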

View File

@@ -0,0 +1,28 @@
"""Add theme column to user table
Revision ID: c07c6e720021
Revises: e341b0e3043c
Create Date: 2025-01-23 15:13:01.413532
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c07c6e720021'
down_revision = 'e341b0e3043c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

View File

@@ -0,0 +1,72 @@
"""Initial migration
Revision ID: e341b0e3043c
Revises:
Create Date: 2025-01-23 15:09:25.485823
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e341b0e3043c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('group',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('player',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=80), nullable=False),
sa.Column('hostname', sa.String(length=120), nullable=False),
sa.Column('password', sa.String(length=120), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('hostname'),
sa.UniqueConstraint('username')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=80), nullable=False),
sa.Column('password', sa.String(length=120), nullable=False),
sa.Column('role', sa.String(length=20), nullable=False),
sa.Column('theme', sa.String(length=10), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
op.create_table('content',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('file_name', sa.String(length=120), nullable=False),
sa.Column('duration', sa.Integer(), nullable=False),
sa.Column('player_id', sa.Integer(), nullable=True),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['player_id'], ['player.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('group_players',
sa.Column('group_id', sa.Integer(), nullable=False),
sa.Column('player_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['player_id'], ['player.id'], ),
sa.PrimaryKeyConstraint('group_id', 'player_id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('group_players')
op.drop_table('content')
op.drop_table('user')
op.drop_table('player')
op.drop_table('group')
# ### end Alembic commands ###
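
For orientation, the tables created by this migration imply SQLAlchemy models roughly like the reconstruction below. This is a sketch, not the project's actual models module; db = SQLAlchemy() and the relationship on Group are assumptions.

# Reconstructed from the migration above; the real model definitions may differ.
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()  # assumption: shared SQLAlchemy instance

group_players = db.Table(
    'group_players',
    db.Column('group_id', db.Integer, db.ForeignKey('group.id'), primary_key=True),
    db.Column('player_id', db.Integer, db.ForeignKey('player.id'), primary_key=True),
)

class Group(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), unique=True, nullable=False)
    # many-to-many via group_players; attribute names are assumptions
    players = db.relationship('Player', secondary=group_players, backref='groups')

class Player(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True, nullable=False)
    hostname = db.Column(db.String(120), unique=True, nullable=False)
    password = db.Column(db.String(120), nullable=False)

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True, nullable=False)
    password = db.Column(db.String(120), nullable=False)
    role = db.Column(db.String(20), nullable=False)
    theme = db.Column(db.String(10), nullable=False)

class Content(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    file_name = db.Column(db.String(120), nullable=False)
    duration = db.Column(db.Integer, nullable=False)
    player_id = db.Column(db.Integer, db.ForeignKey('player.id'), nullable=True)
    group_id = db.Column(db.Integer, db.ForeignKey('group.id'), nullable=True)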