Skip to content

Commit

Permalink
API cumulative postgres migration changes.
Browse files Browse the repository at this point in the history
Signed-off-by: Doug Lovett <[email protected]>
  • Loading branch information
doug-lovett committed Jun 3, 2021
1 parent 0d379c5 commit 319593c
Show file tree
Hide file tree
Showing 81 changed files with 8,223 additions and 733 deletions.
31 changes: 31 additions & 0 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
version: "3.1"

services:
####################### PPR Database Definition #######################
pprdb:
image: postgres:11
restart: always
environment:
POSTGRES_DB: postgres
POSTGRES_PASSWORD: postgres
ports:
- "54360:5432"
volumes:
- db-data:/var/lib/postgresql/data0

####################### PPR Unit Test Database Definition #######################
# pprdb-test:
# image: postgres:11
# restart: always
# environment:
# POSTGRES_DB: postgres
# POSTGRES_PASSWORD: postgres
# ports:
# - "54361:5432"
# volumes:
# - db-data:/var/lib/postgresql/data1


volumes:
db-data:
driver: local
7 changes: 5 additions & 2 deletions ppr-api/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,12 @@ To use the mock pay-api service for local testing, set the .env variable:
1. Open the ppr-api directory in VS Code to treat it as a project (or WSL project). To prevent version clashes, set up a virtual environment to install the Python packages used by this project.
1. Run `make setup`
1. Run `pip install .`
1. See [Oracle DB README](https://github.com/bcgov/ppr/tree/main/oracle-db/README.md) on running a local Docker Oracle PPR database.
1. Update the .env file to add your local environment variables including the database configuration. A sample .env file is provided.
1. See [test data README](./test_data/README.md) for instructions to set up unit test data.
1. Run a local instance of the Postgres PPR database.
1. From your project root run: `docker-compose up -d`
1. In your `venv` environment run: `python manage.py db upgrade`
1. In your `venv` environment load/reload unit test data. Run: `python manage.py create_test_data`


### Running the PPR-API
Start the flask server with `(python -m flask run -p 5000)`
Expand Down
52 changes: 49 additions & 3 deletions ppr-api/manage.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,21 +15,24 @@
"""Manage the database and some other items required to run the API
"""
import logging
import os
import sys

from flask import url_for
from flask import current_app, url_for
from flask_script import Manager # class for handling a set of commands
from flask_migrate import Migrate, MigrateCommand
from sqlalchemy.sql import text

from ppr_api import create_app
from ppr_api.models import db
# models included so that migrate can build the database migrations
# from ppr_api import models # pylint: disable=unused-import
from ppr_api import models # pylint: disable=unused-import

APP = create_app()
MIGRATE = Migrate(APP, db)
MANAGER = Manager(APP)

# MANAGER.add_command('db', MigrateCommand)
MANAGER.add_command('db', MigrateCommand)


@MANAGER.command
Expand All @@ -50,6 +53,49 @@ def list_routes():
print(line)


def execute_script(session, file_name):
    """Execute a SQL script as one or more SQL statements in a single file.

    Each statement must end with ';'. Lines starting with '--' are treated
    as comments and skipped. A statement that fails is reported but does not
    stop execution of the remaining statements; all successful statements are
    committed in a single commit at the end.

    :param session: SQLAlchemy session used to execute and commit statements.
    :param file_name: path to the SQL script file.
    """
    print('Executing SQL statements in file ' + file_name)
    with open(file_name, 'r') as sql_file:
        sql_command = ''
        # Iterate over all lines in the sql file
        for line in sql_file:
            # Ignore commented lines
            if not line.startswith('--') and line.strip('\n'):
                # Append line to the command string.
                # NOTE(review): lines are joined without a separator, so a
                # statement spanning lines must carry its own spacing — TODO
                # confirm the data files are one statement per line.
                sql_command += line.strip('\n')

                # If the command string ends with ';', it is a full statement
                if sql_command.endswith(';'):
                    # Drop only the trailing terminator. The previous
                    # replace(';', '') removed every ';', which would corrupt
                    # statements containing literal semicolons.
                    sql_command = sql_command[:-1]
                    # print('Executing SQL: ' + sql_command)
                    try:
                        session.execute(text(sql_command))
                    # Report the failing statement but keep going.
                    except Exception as ex:  # pylint: disable=broad-except
                        print(repr(ex))
                    # Finally, clear command string
                    finally:
                        sql_command = ''

        session.commit()


@MANAGER.command
def create_test_data():
    """Load unit test data in the dev/local environment.

    All existing test data is deleted as a first step, then the data files
    in test_data/postgres_data_files are loaded in sorted file-name order.
    """
    execute_script(db.session, 'test_data/postgres_test_reset.sql')
    execute_script(db.session, 'test_data/postgres_create_first.sql')
    data_dir = os.path.join(os.getcwd(), 'test_data/postgres_data_files')
    # Sorted order keeps inserts deterministic across environments.
    for data_file in sorted(os.listdir(data_dir)):
        execute_script(db.session, os.path.join(data_dir, data_file))


if __name__ == '__main__':
logging.log(logging.INFO, 'Running the Manager')
MANAGER.run()
1 change: 1 addition & 0 deletions ppr-api/migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
50 changes: 50 additions & 0 deletions ppr-api/migrations/alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,alembic_utils

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_alembic_utils]
level = INFO
handlers =
qualname = alembic_utils

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
125 changes: 125 additions & 0 deletions ppr-api/migrations/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from alembic_utils.replaceable_entity import register_entities

from database.postgres_functions import (
get_draft_document_number,
get_registration_num,
match_individual_name,
searchkey_aircraft,
searchkey_business_name,
searchkey_first_name,
searchkey_last_name,
searchkey_mhr,
searchkey_name_match,
searchkey_nickname_match,
searchkey_vehicle
)


# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
# Point Alembic at the Flask-Migrate engine URL; '%' is doubled because
# the value passes through configparser, which treats '%' as interpolation.
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# Register the Postgres functions with alembic_utils so autogenerate can
# create and diff them alongside the table metadata.
register_entities([get_draft_document_number,
                   get_registration_num,
                   searchkey_name_match,
                   searchkey_nickname_match,
                   match_individual_name,
                   searchkey_aircraft,
                   searchkey_business_name,
                   searchkey_first_name,
                   searchkey_last_name,
                   searchkey_mhr,
                   searchkey_vehicle])


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL rather than an Engine, so no
    DBAPI needs to be available; context.execute() emits the generated
    SQL to the script output instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine and associates a live connection with the Alembic
    context before running the migrations.
    """

    # Callback that suppresses an auto-generated revision when autogenerate
    # detects no schema changes.
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(ctx, revision, directives):
        if not getattr(config.cmd_opts, 'autogenerate', False):
            return
        migration_script = directives[0]
        if migration_script.upgrade_ops.is_empty():
            directives[:] = []
            logger.info('No changes in schema detected.')

    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic executes this module and dispatches to offline mode
# (emit SQL to script output) or online mode (run against a live connection).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24 changes: 24 additions & 0 deletions ppr-api/migrations/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
Loading

0 comments on commit 319593c

Please sign in to comment.