Implement chainlib basedir override

This commit is contained in:
Louis Holbrook
2021-08-26 08:09:47 +00:00
parent e7bc480f7c
commit 5ff72117bc
46 changed files with 1083 additions and 480 deletions

View File

View File

@@ -0,0 +1 @@
Generic single-database configuration.

View File

@@ -0,0 +1,85 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = .
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
#level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1,77 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging (this sets up loggers).
# Guard against a Config created programmatically without an ini path:
# fileConfig(None) would raise, so only configure logging when a file
# name is actually present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL rather than an
    Engine, so no DBAPI needs to be available; calls to
    context.execute() emit the generated SQL to the script output.
    """
    offline_options = {
        'url': config.get_main_option("sqlalchemy.url"),
        'target_metadata': target_metadata,
        'literal_binds': True,
        'dialect_opts': {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the ini section and binds a live connection
    to the migration context before running the migrations.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic decides offline/online mode before loading env.py.
_runner = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_runner()

View File

@@ -0,0 +1,37 @@
from alembic import op
import sqlalchemy as sa
from chainsyncer.db.migrations.default.versions.src.sync import (
upgrade as upgrade_sync,
downgrade as downgrade_sync,
)
from chainsyncer.db.migrations.default.versions.src.sync_tx import (
upgrade as upgrade_sync_tx,
downgrade as downgrade_sync_tx,
)
def chainsyncer_upgrade(major=0, minor=0, patch=3):
    """Apply chainsyncer schema migrations up to the requested version.

    major/minor are accepted for interface symmetry but not consulted;
    only the patch level selects which steps run.
    """
    steps = [r0_0_1_u]
    if patch >= 3:
        steps.append(r0_0_3_u)
    for step in steps:
        step()
def chainsyncer_downgrade(major=0, minor=0, patch=3):
    """Roll back chainsyncer schema migrations, newest step first.

    major/minor are accepted for interface symmetry but not consulted;
    only the patch level selects which steps run.
    """
    steps = []
    if patch >= 3:
        steps.append(r0_0_3_d)
    steps.append(r0_0_1_d)
    for step in steps:
        step()
def r0_0_1_u():
    """0.0.1 upgrade step: delegates to src.sync.upgrade (creates the
    chain_sync and chain_sync_filter tables)."""
    upgrade_sync()
def r0_0_1_d():
    """0.0.1 downgrade step: delegates to src.sync.downgrade (drops the
    chain_sync and chain_sync_filter tables)."""
    downgrade_sync()
# 0.0.3
def r0_0_3_u():
    """0.0.3 upgrade step: delegates to src.sync_tx.upgrade (creates the
    chain_sync_tx table)."""
    upgrade_sync_tx()
def r0_0_3_d():
    """0.0.3 downgrade step: delegates to src.sync_tx.downgrade (drops the
    chain_sync_tx table)."""
    downgrade_sync_tx()

View File

@@ -0,0 +1,24 @@
## Mako template used by Alembic to render new revision files
## ("alembic revision"). Lines starting with "##" are Mako comments
## and are stripped from the generated output.
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
    ${upgrades if upgrades else "pass"}
def downgrade():
    ${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,14 @@
"""base setup

Revision ID: 452ecfa81de3
Revises:
Create Date: 2021-07-16 16:29:32.460027
"""
# The actual DDL lives in the shared src module; this stub only binds it
# to an Alembic revision id.
from chainsyncer.db.migrations.default.versions.src.sync import upgrade, downgrade

# revision identifiers, used by Alembic.
revision = '452ecfa81de3'
down_revision = None
branch_labels = None
depends_on = None

View File

@@ -0,0 +1,14 @@
"""sync-tx

Revision ID: a2ce6826c5eb
Revises: 452ecfa81de3
Create Date: 2021-07-16 18:17:53.439721
"""
# The actual DDL lives in the shared src module; this stub only binds it
# to an Alembic revision id.
from chainsyncer.db.migrations.default.versions.src.sync_tx import upgrade, downgrade

# revision identifiers, used by Alembic.
revision = 'a2ce6826c5eb'
down_revision = '452ecfa81de3'
branch_labels = None
depends_on = None

View File

@@ -0,0 +1,32 @@
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the chain_sync and chain_sync_filter tables."""
    chain_sync_columns = [
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('blockchain', sa.String, nullable=False),
        sa.Column('block_start', sa.Integer, nullable=False, default=0),
        sa.Column('tx_start', sa.Integer, nullable=False, default=0),
        sa.Column('block_cursor', sa.Integer, nullable=False, default=0),
        sa.Column('tx_cursor', sa.Integer, nullable=False, default=0),
        sa.Column('block_target', sa.Integer, nullable=True),
        sa.Column('date_created', sa.DateTime, nullable=False),
        sa.Column('date_updated', sa.DateTime),
    ]
    op.create_table('chain_sync', *chain_sync_columns)

    # filter rows optionally reference a chain_sync row (nullable FK)
    chain_sync_filter_columns = [
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('chain_sync_id', sa.Integer, sa.ForeignKey('chain_sync.id'), nullable=True),
        sa.Column('flags', sa.LargeBinary, nullable=True),
        sa.Column('flags_start', sa.LargeBinary, nullable=True),
        sa.Column('count', sa.Integer, nullable=False, default=0),
        sa.Column('digest', sa.String(64), nullable=False),
    ]
    op.create_table('chain_sync_filter', *chain_sync_filter_columns)
def downgrade():
    """Drop the sync tables; the filter table goes first because it holds
    a foreign key into chain_sync."""
    for table_name in ('chain_sync_filter', 'chain_sync'):
        op.drop_table(table_name)

View File

@@ -0,0 +1,17 @@
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the chain_sync_tx table (references chain_sync.id)."""
    tx_columns = [
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('blockchain', sa.String, nullable=False),
        sa.Column('chain_sync_id', sa.Integer, sa.ForeignKey('chain_sync.id'), nullable=False),
        sa.Column('flags', sa.LargeBinary, nullable=True),
        sa.Column('block', sa.Integer, nullable=False),
        sa.Column('tx', sa.Integer, nullable=False),
    ]
    op.create_table('chain_sync_tx', *tx_columns)
def downgrade():
    """Drop the chain_sync_tx table created by upgrade()."""
    op.drop_table('chain_sync_tx')

View File

@@ -1,36 +1,37 @@
# NOTE(review): this span appears to be a garbled unified-diff rendering:
# the pre- and post-change versions of chainsyncer_upgrade/chainsyncer_downgrade
# are interleaved without +/- markers (chainsyncer_upgrade is defined twice),
# and the two "from ... import" statements are missing their opening
# parenthesis before the name lists, so the text is not valid Python.
# Recover the intended file content from version control rather than
# editing this text directly.
from alembic import op
import sqlalchemy as sa
def chainsyncer_upgrade(major=0, minor=0, patch=1):
r0_0_1_u()
from chainsyncer.db.migrations.default.versions.tags.sync import
upgrade as upgrade_sync,
downgrade as downgrade_sync,
)
def chainsyncer_downgrade(major=0, minor=0, patch=1):
from chainsyncer.db.migrations.default.versions.tags.sync_tx import
upgrade as upgrade_sync_tx,
downgrade as downgrade_sync_tx,
)
def chainsyncer_upgrade(major=0, minor=0, patch=3):
r0_0_1_u()
if patch >= 3:
r0_0_3_u()
def chainsyncer_downgrade(major=0, minor=0, patch=3):
if patch >= 3:
r0_0_3_d()
r0_0_1_d()
def r0_0_1_u():
op.create_table(
'chain_sync',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('blockchain', sa.String, nullable=False),
sa.Column('block_start', sa.Integer, nullable=False, default=0),
sa.Column('tx_start', sa.Integer, nullable=False, default=0),
sa.Column('block_cursor', sa.Integer, nullable=False, default=0),
sa.Column('tx_cursor', sa.Integer, nullable=False, default=0),
sa.Column('block_target', sa.Integer, nullable=True),
sa.Column('date_created', sa.DateTime, nullable=False),
sa.Column('date_updated', sa.DateTime),
)
op.create_table(
'chain_sync_filter',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('chain_sync_id', sa.Integer, sa.ForeignKey('chain_sync.id'), nullable=True),
sa.Column('flags', sa.LargeBinary, nullable=True),
sa.Column('flags_start', sa.LargeBinary, nullable=True),
sa.Column('count', sa.Integer, nullable=False, default=0),
sa.Column('digest', sa.String(64), nullable=False),
)
upgrade_sync()
def r0_0_1_d():
op.drop_table('chain_sync_filter')
op.drop_table('chain_sync')
downgrade_sync()
# 0.0.3
def r0_0_3_u():
upgrade_sync_tx()
def r0_0_3_d():
downgrade_sync_tx()

View File

View File

@@ -120,3 +120,4 @@ class SessionBase(Model):
logg.debug('destroying session {}'.format(session_key))
session.commit()
session.close()
del SessionBase.localsessions[session_key]