Initial import
commit 09f0b0672d
64 changed files with 3735 additions and 0 deletions

1  migrations/README  Normal file
@@ -0,0 +1 @@
Single-database configuration for Flask.
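
Note: the env.py added below reads the database URL and model metadata from current_app.extensions['migrate'], so these migrations assume an application that registers Flask-Migrate roughly as in the following sketch. The module layout, model imports and database URL here are illustrative assumptions and are not part of this commit.

# Hypothetical application wiring assumed by these migrations (not included in this commit).
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///app.db"  # placeholder URL, an assumption

db = SQLAlchemy(app)
migrate = Migrate(app, db)  # registers itself as app.extensions['migrate'], which env.py reads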

50  migrations/alembic.ini  Normal file
@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
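
Note: the [loggers], [handlers] and [formatters] sections above are consumed by logging.config.fileConfig() in env.py. The root and sqlalchemy.engine loggers are held at WARN, while the alembic and flask_migrate loggers pass INFO and above to stderr through the console handler. A minimal standalone sketch of that behaviour, assuming the file is at migrations/alembic.ini:

# Standalone sketch: load the logging sections of the ini above and exercise them.
import logging
from logging.config import fileConfig

fileConfig("migrations/alembic.ini")  # non-logging sections such as [alembic] are simply ignored

logging.getLogger("alembic").info("shown: INFO passes the alembic logger and the NOTSET console handler")
logging.getLogger("sqlalchemy.engine").info("suppressed: this logger is configured at WARN")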

91  migrations/env.py  Normal file
@@ -0,0 +1,91 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.get_engine().url).replace(
        '%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    connectable = current_app.extensions['migrate'].db.get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
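
Note: run_migrations_offline() is chosen when Alembic runs with the --sql flag (context.is_offline_mode() is then true) and emits a SQL script rather than touching the database; otherwise run_migrations_online() connects through the Flask-SQLAlchemy engine. A sketch of driving both paths through Flask-Migrate's programmatic helpers, reusing the hypothetical app object from the earlier sketch:

# Sketch: how the two code paths in env.py get selected via Flask-Migrate's helpers.
from flask_migrate import upgrade

with app.app_context():  # 'app' is the hypothetical application sketched after the README
    # Online mode: run_migrations_online() connects and applies the migrations.
    upgrade()
    # Offline ("--sql") mode: run_migrations_offline() prints the SQL script instead of executing it.
    # upgrade(sql=True)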

24  migrations/script.py.mako  Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

124  migrations/versions/07c4fb2af22c_initial_schema.py  Normal file
@@ -0,0 +1,124 @@
"""initial schema

Revision ID: 07c4fb2af22c
Revises: 
Create Date: 2022-03-31 12:36:02.922753

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '07c4fb2af22c'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('group',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('group_name', sa.String(length=80), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('eotk', sa.Boolean(), nullable=True),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_group')),
    sa.UniqueConstraint('group_name', name=op.f('uq_group_group_name'))
    )
    op.create_table('bridge_conf',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('group_id', sa.Integer(), nullable=False),
    sa.Column('provider', sa.String(length=20), nullable=False),
    sa.Column('method', sa.String(length=20), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=True),
    sa.Column('number', sa.Integer(), nullable=True),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.Column('destroyed', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_bridge_conf_group_id_group')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_bridge_conf'))
    )
    op.create_table('origin',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('group_id', sa.Integer(), nullable=False),
    sa.Column('domain_name', sa.String(length=255), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_origin_group_id_group')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_origin')),
    sa.UniqueConstraint('domain_name', name=op.f('uq_origin_domain_name'))
    )
    op.create_table('bridge',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('conf_id', sa.Integer(), nullable=False),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.Column('deprecated', sa.DateTime(), nullable=True),
    sa.Column('destroyed', sa.DateTime(), nullable=True),
    sa.Column('terraform_updated', sa.DateTime(), nullable=True),
    sa.Column('fingerprint', sa.String(length=255), nullable=True),
    sa.Column('hashed_fingerprint', sa.String(length=255), nullable=True),
    sa.Column('bridgeline', sa.String(length=255), nullable=True),
    sa.ForeignKeyConstraint(['conf_id'], ['bridge_conf.id'], name=op.f('fk_bridge_conf_id_bridge_conf')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_bridge'))
    )
    op.create_table('mirror',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('origin_id', sa.Integer(), nullable=False),
    sa.Column('url', sa.String(length=255), nullable=False),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.Column('deprecated', sa.DateTime(), nullable=True),
    sa.Column('destroyed', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['origin_id'], ['origin.id'], name=op.f('fk_mirror_origin_id_origin')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_mirror')),
    sa.UniqueConstraint('url', name=op.f('uq_mirror_url'))
    )
    op.create_table('proxy',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('origin_id', sa.Integer(), nullable=False),
    sa.Column('provider', sa.String(length=20), nullable=False),
    sa.Column('slug', sa.String(length=20), nullable=True),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.Column('deprecated', sa.DateTime(), nullable=True),
    sa.Column('destroyed', sa.DateTime(), nullable=True),
    sa.Column('terraform_updated', sa.DateTime(), nullable=True),
    sa.Column('url', sa.String(length=255), nullable=True),
    sa.ForeignKeyConstraint(['origin_id'], ['origin.id'], name=op.f('fk_proxy_origin_id_origin')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_proxy'))
    )
    op.create_table('alarm',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('target', sa.String(length=60), nullable=False),
    sa.Column('group_id', sa.Integer(), nullable=True),
    sa.Column('origin_id', sa.Integer(), nullable=True),
    sa.Column('proxy_id', sa.Integer(), nullable=True),
    sa.Column('bridge_id', sa.Integer(), nullable=True),
    sa.Column('alarm_type', sa.String(length=255), nullable=False),
    sa.Column('alarm_state', sa.Enum('UNKNOWN', 'OK', 'WARNING', 'CRITICAL', name='alarmstate'), nullable=False),
    sa.Column('state_changed', sa.DateTime(), nullable=False),
    sa.Column('last_updated', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['bridge_id'], ['bridge.id'], name=op.f('fk_alarm_bridge_id_bridge')),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_alarm_group_id_group')),
    sa.ForeignKeyConstraint(['origin_id'], ['origin.id'], name=op.f('fk_alarm_origin_id_origin')),
    sa.ForeignKeyConstraint(['proxy_id'], ['proxy.id'], name=op.f('fk_alarm_proxy_id_proxy')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_alarm'))
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('alarm')
    op.drop_table('proxy')
    op.drop_table('mirror')
    op.drop_table('bridge')
    op.drop_table('origin')
    op.drop_table('bridge_conf')
    op.drop_table('group')
    # ### end Alembic commands ###
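
Note: for orientation, the 'group' table created above maps onto a Flask-SQLAlchemy model along the following lines; the op.f() constraint names (pk_group, uq_group_group_name) indicate that the metadata uses a naming convention. This is an illustrative reconstruction from the migration only; the real model classes elsewhere in this commit may differ, and the column defaults shown are assumptions.

# Hypothetical model matching the 'group' table in this migration (illustrative only).
from datetime import datetime
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()


class Group(db.Model):
    __tablename__ = "group"

    id = db.Column(db.Integer, primary_key=True)
    group_name = db.Column(db.String(80), unique=True, nullable=False)
    description = db.Column(db.String(255), nullable=False)
    eotk = db.Column(db.Boolean())
    added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False)    # default is an assumption
    updated = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False)  # default is an assumption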

38  migrations/versions/59c9a5185e88_alarms_text_and_destroy_origins.py  Normal file
@@ -0,0 +1,38 @@
"""alarms text and destroy origins

Revision ID: 59c9a5185e88
Revises: 5c69fe874e4d
Create Date: 2022-04-07 16:30:27.888327

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '59c9a5185e88'
down_revision = '5c69fe874e4d'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('alarm', schema=None) as batch_op:
        batch_op.add_column(sa.Column('text', sa.String(length=255), nullable=True))

    with op.batch_alter_table('origin', schema=None) as batch_op:
        batch_op.add_column(sa.Column('destroyed', sa.DateTime(), nullable=True))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('origin', schema=None) as batch_op:
        batch_op.drop_column('destroyed')

    with op.batch_alter_table('alarm', schema=None) as batch_op:
        batch_op.drop_column('text')

    # ### end Alembic commands ###

32  migrations/versions/5c69fe874e4d_add_bridge_nicknames.py  Normal file
@@ -0,0 +1,32 @@
"""add bridge nicknames

Revision ID: 5c69fe874e4d
Revises: e1332e4cb910
Create Date: 2022-04-05 15:48:36.552558

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '5c69fe874e4d'
down_revision = 'e1332e4cb910'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('bridge', schema=None) as batch_op:
        batch_op.add_column(sa.Column('nickname', sa.String(length=255), nullable=True))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('bridge', schema=None) as batch_op:
        batch_op.drop_column('nickname')

    # ### end Alembic commands ###

41  migrations/versions/e1332e4cb910_add_mirror_lists.py  Normal file
@@ -0,0 +1,41 @@
"""add mirror lists

Revision ID: e1332e4cb910
Revises: 07c4fb2af22c
Create Date: 2022-03-31 13:33:49.067575

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'e1332e4cb910'
down_revision = '07c4fb2af22c'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('mirror_list',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('provider', sa.String(length=255), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('format', sa.String(length=20), nullable=False),
    sa.Column('container', sa.String(length=255), nullable=False),
    sa.Column('branch', sa.String(length=255), nullable=False),
    sa.Column('filename', sa.String(length=255), nullable=False),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.Column('deprecated', sa.DateTime(), nullable=True),
    sa.Column('destroyed', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_mirror_list'))
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('mirror_list')
    # ### end Alembic commands ###
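
Note: the four revisions in this commit form the chain 07c4fb2af22c -> e1332e4cb910 -> 5c69fe874e4d -> 59c9a5185e88. A further revision would normally be autogenerated on top of 59c9a5185e88, which is where the process_revision_directives callback in env.py suppresses empty migrations. A sketch using Flask-Migrate's programmatic API, with an illustrative message and the hypothetical app object from the earlier sketch:

# Sketch: autogenerating the next revision on top of this chain.
from flask_migrate import migrate as autogenerate_revision  # aliased to avoid shadowing the Migrate instance

with app.app_context():
    # Compares target_metadata against the live schema; if nothing changed, env.py's
    # process_revision_directives empties the directive list and logs "No changes in schema detected."
    autogenerate_revision(message="illustrative message for the next revision")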