Commit 1d374179 authored by Bjarke Madsen

Rework database layer, get rid of flask-migrate

parent 71b79995
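At a glance, this commit removes the flask-migrate / Flask-SQLAlchemy wiring and applies Alembic migrations directly at application start-up. A minimal sketch of the new upgrade path, using only names that appear in the diff below (the DSN is the development one from alembic.ini):

# Sketch of the migration entry point introduced by this commit:
# migration_utils.upgrade(dsn) builds an Alembic Config in memory and runs
# the equivalent of `alembic upgrade head` against the given database.
from compendium_v2.migrations import migration_utils

dsn = 'mysql+pymysql://compendium:compendium321@localhost/compendium'
migration_utils.upgrade(dsn)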
@@ -8,10 +8,16 @@ from flask import Flask
from flask_cors import CORS # for debugging
from compendium_v2 import config, environment
from compendium_v2.db import db, db_survey, migrate
from compendium_v2.migrations import migration_utils
def create_app():
def migrate_database(config: dict) -> None:
dsn = config['SQLALCHEMY_DATABASE_URI']
migration_utils.upgrade(dsn)
def create_app() -> Flask:
"""
overrides default settings with those found
in the file read from env var SETTINGS_FILENAME
@@ -19,7 +25,9 @@ def create_app():
:return: a new flask app instance
"""
assert 'SETTINGS_FILENAME' in os.environ
assert 'SETTINGS_FILENAME' in os.environ, \
"environment variable 'SETTINGS_FILENAME' is required"
with open(os.environ['SETTINGS_FILENAME']) as f:
app_config = config.load(f)
@@ -28,12 +36,6 @@ def create_app():
app.secret_key = 'super secret session key'
app.config['CONFIG_PARAMS'] = app_config
app.config['SQLALCHEMY_DATABASE_URI'] = \
app_config['SQLALCHEMY_DATABASE_URI']
db.init_app(app)
db_survey.init_app(app)
migrate.init_app(app, db)
from compendium_v2.routes import default
app.register_blueprint(default.routes, url_prefix='/')
@@ -48,4 +50,7 @@ def create_app():
environment.setup_logging()
# run migrations on startup
migrate_database(app_config)
return app
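With this change, starting the application also brings the schema up to date: create_app() loads the settings file named by SETTINGS_FILENAME and calls migrate_database() before returning. A minimal start-up sketch (the settings file path is hypothetical; its format must match whatever config.load() expects):

import os

# hypothetical settings file containing SQLALCHEMY_DATABASE_URI
os.environ['SETTINGS_FILENAME'] = '/path/to/settings.json'

from compendium_v2 import create_app

app = create_app()  # runs migrate_database(), i.e. an Alembic 'upgrade head', first
app.run()           # development server only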
# A generic, single database configuration.
# only needed for generating new revision scripts
[alembic]
# make sure the right script_location line below is commented / uncommented,
# depending on which schema you want a migration for
script_location = migrations
# script_location = cachedb_migrations
# change this to run migrations from the command line
sqlalchemy.url = mysql+pymysql://compendium:compendium321@localhost/compendium
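As the comments note, this file is only needed when generating new revision scripts. A sketch of doing that from Python with the standard Alembic API (the message text is illustrative, and the file is assumed to be saved as alembic.ini in the working directory):

from alembic import command
from alembic.config import Config

# reads script_location and sqlalchemy.url from the [alembic] section above
alembic_cfg = Config('alembic.ini')
command.revision(alembic_cfg,
                 message='describe the schema change',  # illustrative message
                 autogenerate=True)  # autogenerate needs a reachable database

Equivalently, `alembic revision --autogenerate -m "..."` from the command line uses the same configuration.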
import os
from typing import Any
import contextlib
import logging
from typing import Optional, Union, Callable, Iterator
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker, Session
MIGRATION_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'migrations'))
logger = logging.getLogger(__name__)
_SESSION_MAKER: Union[None, sessionmaker] = None
# https://github.com/python/mypy/issues/2477
base_schema: Any = declarative_base(metadata=MetaData(schema='presentation'))
db = SQLAlchemy(model_class=base_schema)
base_survey_schema: Any = declarative_base(metadata=MetaData(schema='survey'))
db_survey = SQLAlchemy(model_class=base_survey_schema)
@contextlib.contextmanager
def session_scope(
callback_before_close: Optional[Callable] = None) -> Iterator[Session]:
# best practice is to keep session scope separate from data processing
# cf. https://docs.sqlalchemy.org/en/13/orm/session_basics.html
migrate = Migrate(directory=MIGRATION_DIR)
assert _SESSION_MAKER
session = _SESSION_MAKER()
try:
yield session
session.commit()
if callback_before_close:
callback_before_close()
except SQLAlchemyError:
logger.error('caught sql layer exception, rolling back')
session.rollback()
raise # re-raise, will be handled by main consumer
finally:
session.close()
def mysql_dsn(db_username, db_password, db_hostname, db_name, port=3306):
return (f'mysql+pymysql://{db_username}:{db_password}'
f'@{db_hostname}:{port}/{db_name}?charset=utf8mb4')
def init_db_model(dsn):
global _SESSION_MAKER
# cf. https://docs.sqlalchemy.org/en
# /latest/orm/extensions/automap.html
engine = create_engine(dsn, pool_size=10, max_overflow=0)
_SESSION_MAKER = sessionmaker(bind=engine)
import enum
import logging
import sqlalchemy as sa
from typing import Any
from sqlalchemy.ext.declarative import declarative_base
# from sqlalchemy.orm import relationship
logger = logging.getLogger(__name__)
# https://github.com/python/mypy/issues/2477
base_schema: Any = declarative_base()
def _enum_names(enum_class):
return [x.name for x in list(enum_class)]
class BudgetType(enum.Enum):
YEARLY = 1
NREN = 2
class BudgetEntry(base_schema):
__tablename__ = 'budgets'
id = sa.Column(sa.Integer, primary_key=True)
budget_type = sa.Column(
'budget_type',
sa.Enum(*_enum_names(BudgetType), name='budget_type'),
nullable=False)
name = sa.Column(sa.String(128))
description = sa.Column(sa.String(2048))
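Taken together, mysql_dsn(), init_db_model() and session_scope() above can be used with the BudgetEntry model roughly as follows. This is a sketch only; the module path compendium_v2.db.model and the development credentials are taken from elsewhere in this diff:

from compendium_v2.db import init_db_model, mysql_dsn, session_scope
from compendium_v2.db.model import BudgetEntry

# same development credentials as in migration_utils' __main__ block
dsn = mysql_dsn(
    db_username='compendium',
    db_password='compendium321',
    db_hostname='localhost',
    db_name='compendium')

init_db_model(dsn)  # must be called once before session_scope() is used

with session_scope() as session:
    # committed on clean exit, rolled back and re-raised on SQLAlchemyError
    for entry in session.query(BudgetEntry):
        print(entry.name, entry.budget_type)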
import enum
import logging
from sqlalchemy.orm import relationship
from compendium_v2.db import base_schema, db
logger = logging.getLogger(__name__)
class DataEntrySection(db.Model):
__tablename__ = 'data_entry_sections'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(128))
description = db.Column(db.String(2048))
is_active = db.Column(db.Boolean)
sort_order = db.Column(db.Integer)
items = relationship('DataEntryItem')
class DataSourceType(enum.Enum):
BUDGETS_BY_YEAR = 1
BUDGETS_BY_NREN = 2
data_entry_settings_assoc_table = db.Table(
'data_entry_settings_assoc_table',
base_schema.metadata,
db.Column('item_id', db.ForeignKey('data_entry_items.id')),
db.Column('setting_id', db.ForeignKey('data_entry_settings.id')))
class DataEntryItem(db.Model):
__tablename__ = 'data_entry_items'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(128))
description = db.Column(db.String(2048))
is_active = db.Column(db.Boolean)
sort_order = db.Column(db.Integer)
data_source = db.Column(db.Enum(DataSourceType))
section_id = db.Column(db.Integer, db.ForeignKey('data_entry_sections.id'))
section = relationship('DataEntrySection', back_populates='items')
settings = relationship('DataEntrySettings',
secondary=data_entry_settings_assoc_table)
class SettingType(enum.Enum):
COLOUR_PALETTE = 1
CHART_TYPE = 2
HELP_ITEM = 3
class DataEntrySettings(db.Model):
__tablename__ = 'data_entry_settings'
id = db.Column(db.Integer, primary_key=True)
setting_type = db.Column(db.Enum(SettingType))
setting_value = db.Column(db.String(512))
import logging
from compendium_v2.db import db_survey
logger = logging.getLogger(__name__)
class AnnualBudgetEntry(db_survey.Model):
__tablename__ = 'budgets'
id = db_survey.Column(db_survey.Integer, primary_key=True)
region = db_survey.Column(db_survey.String(7))
country = db_survey.Column(db_survey.Text())
budget = db_survey.Column(db_survey.Text())
year = db_survey.Column(db_survey.Integer())
country_code = db_survey.Column(db_survey.Text())
region_name = db_survey.Column(db_survey.Text())
def get_budget_by_year():
budget_data = db_survey.session.execute(
db_survey.select(AnnualBudgetEntry)
).scalars()
annual_data = {}
seen_countries = set()
for line_item in budget_data:
li_year = line_item.year
li_country_code = line_item.country_code
if li_year not in annual_data:
annual_data[li_year] = {}
seen_countries.add(li_country_code)
annual_data[li_year][li_country_code] = line_item.budget
sorted_countries = sorted(seen_countries)
response_data = {
'labels': sorted_countries,
'datasets': []
}
for year in sorted(annual_data.keys()):
dataset = {
'label': str(year),
'data': []
}
for country in sorted_countries:
budget_amount = annual_data[year].get(country)
dataset['data'].append(float(budget_amount)
if budget_amount else None)
response_data['datasets'].append(dataset)
return response_data
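For illustration, with survey rows for two years and three countries, get_budget_by_year() returns a chart-friendly structure shaped like the following (country codes and figures are hypothetical; None marks a country with no budget reported for that year):

# hypothetical return value of get_budget_by_year()
{
    'labels': ['DE', 'FR', 'NL'],
    'datasets': [
        {'label': '2020', 'data': [120.0, 95.5, None]},
        {'label': '2021', 'data': [125.0, 97.0, 88.0]},
    ]
}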
def get_budget_by_nren():
budget_data = db_survey.session.execute(
db_survey.select(AnnualBudgetEntry)
.filter(AnnualBudgetEntry.region_name == 'Western Europe')
).scalars()
annual_data = {}
seen_years = set()
for line_item in budget_data:
li_year = line_item.year
li_country_code = line_item.country_code
if li_country_code not in annual_data:
annual_data[li_country_code] = {}
seen_years.add(li_year)
annual_data[li_country_code][li_year] = line_item.budget
sorted_years = sorted(seen_years)
response_data = {
'labels': sorted_years,
'datasets': []
}
for country in sorted(annual_data.keys()):
dataset = {
'label': country,
'data': []
}
for year in sorted_years:
budget_amount = annual_data[country].get(year)
dataset['data'].append(float(budget_amount)
if budget_amount else None)
response_data['datasets'].append(dataset)
return response_data
# A generic, single database configuration.
[alembic]
# template used to generate migration files
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
from __future__ import with_statement
from compendium_v2.db import base_schema
import logging
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from flask import current_app
from compendium_v2.db.model import base_schema
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -13,34 +12,20 @@ config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
logging.basicConfig(level=logging.INFO)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# from compendium_v2.db.models import DataEntryItem, DataEntrySection
# target_metadata = mymodel.Base.metadata
target_metadata = base_schema.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.get_engine().url).replace(
'%', '%%'))
target_db = current_app.extensions['migrate'].db
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_metadata():
if hasattr(target_db, 'metadatas'):
return target_db.metadatas[None]
return target_db.metadata
def run_migrations_offline():
"""Run migrations in 'offline' mode.
@@ -53,9 +38,12 @@ def run_migrations_offline():
script output.
"""
url = config.get_main_option('sqlalchemy.url')
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=get_metadata(), literal_binds=True
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
@@ -69,25 +57,15 @@ def run_migrations_online():
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
connectable = current_app.extensions['migrate'].db.get_engine()
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=get_metadata(),
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
......
import logging
import os
from compendium_v2 import db
from alembic.config import Config
from alembic import command
logger = logging.getLogger(__name__)
DEFAULT_MIGRATIONS_DIRECTORY = os.path.dirname(__file__)
def upgrade(dsn, migrations_directory=DEFAULT_MIGRATIONS_DIRECTORY):
"""
migrate db to head version
cf. https://stackoverflow.com/a/43530495,
https://stackoverflow.com/a/54402853
:param dsn: dsn string, passed to alembic
:param migrations_directory: full path to migrations directory
(default is this directory)
:return:
"""
alembic_config = Config()
alembic_config.set_main_option('script_location', migrations_directory)
alembic_config.set_main_option('sqlalchemy.url', dsn)
command.upgrade(alembic_config, 'head')
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
upgrade(db.mysql_dsn(
db_username='compendium',
db_password='compendium321',
db_hostname='localhost',
db_name='compendium',
port=3306))
"""Initial Data Entry models
Revision ID: 1e8ba780b977
Revises:
Create Date: 2022-11-28 12:23:36.478734
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '1e8ba780b977'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('data_entry_sections',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('description', sa.String(
length=2048), nullable=True),
sa.Column('sort_order', sa.Integer(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
schema='presentation'
)
op.create_table('data_entry_settings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('setting_type',
sa.Enum('COLOUR_PALLETE', 'CHART_TYPE',
'HELP_ITEM', name='settingtype'),
nullable=True),
sa.Column('setting_value', sa.String(
length=512), nullable=True),
sa.PrimaryKeyConstraint('id'),
schema='presentation'
)
op.create_table('data_entry_items',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=128), nullable=True),
sa.Column('description', sa.String(
length=2048), nullable=True),
sa.Column('sort_order', sa.Integer(), nullable=True),
sa.Column('is_visible', sa.Boolean(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('data_source',
sa.Enum('BUDGETS_BY_YEAR',
'BUDGETS_BY_NREN',
name='datasourcetype'),
nullable=True),
sa.Column('section_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
['section_id'],
['presentation.data_entry_sections.id'], ),
sa.PrimaryKeyConstraint('id'),
schema='presentation'
)
op.create_table('data_entry_settings_assoc_table',
sa.Column('item_id', sa.Integer(), nullable=True),
sa.Column('setting_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
['item_id'],
['presentation.data_entry_items.id'], ),
sa.ForeignKeyConstraint(
['setting_id'],
['presentation.data_entry_settings.id'], ),
schema='presentation'
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('data_entry_settings_assoc_table', schema='presentation')
op.drop_table('data_entry_items', schema='presentation')
op.drop_table('data_entry_settings', schema='presentation')
op.drop_table('data_entry_sections', schema='presentation')
# ### end Alembic commands ###
"""Initial DB
Revision ID: 95577456fcfd
Revises:
Create Date: 2022-12-26 08:08:09.711624
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '95577456fcfd'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'budgets',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('budget_type', sa.Enum(
'YEARLY', 'NREN', name='budget_type'), nullable=False),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('description', sa.String(
length=2048), nullable=True),
sa.PrimaryKeyConstraint('id')
)
def downgrade():
op.drop_table('budgets')