initial commit
commit fc19367423

6  .gitignore  vendored  Normal file
@@ -0,0 +1,6 @@
__pycache__
gmon.out
*.pyc
dist/
build/
*.egg-info
1  chaind/__init__.py  Normal file
@@ -0,0 +1 @@
from .setup import Environment
1  chaind/db/migrations/default/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
85  chaind/db/migrations/default/alembic.ini  Normal file
@@ -0,0 +1,85 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/chaind


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
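Note on the hard-coded sqlalchemy.url above: it is only the ini default for this migration set; scripts/migrate.py, added further down in this commit, replaces it at runtime with the DSN derived from the confini configuration. A minimal sketch of that override, with an assumed placeholder DSN:

# Sketch only (not part of this commit): runtime override of the ini defaults,
# mirroring what scripts/migrate.py does below.
import alembic.command
from alembic.config import Config as AlembicConfig

ac = AlembicConfig('chaind/db/migrations/default/alembic.ini')
ac.set_main_option('sqlalchemy.url', 'postgresql+psycopg2://postgres@localhost:5432/chaind')  # placeholder DSN
ac.set_main_option('script_location', 'chaind/db/migrations/default')
alembic.command.upgrade(ac, 'head')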
77  chaind/db/migrations/default/env.py  Normal file
@@ -0,0 +1,77 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24  chaind/db/migrations/default/script.py.mako  Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,28 @@
"""chainqueue

Revision ID: 7ac591b16c68
Revises: b139fca16787
Create Date: 2021-06-03 13:11:24.579148

"""
from alembic import op
import sqlalchemy as sa

from chainqueue.db.migrations.sqlalchemy import (
    chainqueue_upgrade,
    chainqueue_downgrade,
)

# revision identifiers, used by Alembic.
revision = '7ac591b16c68'
down_revision = 'b139fca16787'
branch_labels = None
depends_on = None


def upgrade():
    chainqueue_upgrade()


def downgrade():
    chainqueue_downgrade()
@@ -0,0 +1,28 @@
"""chainsyncer

Revision ID: b139fca16787
Revises:
Create Date: 2021-06-03 13:09:23.731381

"""
from alembic import op
import sqlalchemy as sa

from chainsyncer.db.migrations.sqlalchemy import (
    chainsyncer_upgrade,
    chainsyncer_downgrade,
)

# revision identifiers, used by Alembic.
revision = 'b139fca16787'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    chainsyncer_upgrade()


def downgrade():
    chainsyncer_downgrade()
41  chaind/setup.py  Normal file
@@ -0,0 +1,41 @@
# standard imports
import os
import uuid

# external imports
import chainqueue
import chainsyncer
from xdg.BaseDirectory import (
    xdg_data_dirs,
    get_runtime_dir,
    load_first_config,
)


class Environment:

    def __init__(self, domain=None, session=None, env={}):
        if not session:
            session = env.get('CHAIND_SESSION')
        if not session:
            session = uuid.uuid4()
        self.__session = session

        if not domain:
            domain = env.get('CHAIND_DOMAIN')

        base_config_dir = load_first_config('chaind')
        self.runtime_dir = os.path.join(get_runtime_dir(), 'chaind')
        self.data_dir = os.path.join(xdg_data_dirs[0], 'chaind')
        self.config_dir = env.get('CONFINI_DIR', os.path.join(base_config_dir))
        self.session_runtime_dir = os.path.join(self.runtime_dir, self.session)

        if domain:
            self.runtime_dir = os.path.join(self.runtime_dir, domain)
            self.data_dir = os.path.join(self.data_dir, domain)
            self.config_dir = os.path.join(self.config_dir, domain)
            self.session_runtime_dir = os.path.join(self.runtime_dir, self.session)

    @property
    def session(self):
        return str(self.__session)
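Note on the Environment class above: it resolves chaind's XDG runtime, data and config directories and generates a per-invocation session uuid; load_first_config('chaind') must find an existing chaind config directory for construction to succeed. A minimal usage sketch (the domain value and the printed paths are assumptions that depend on the local XDG environment):

# Sketch only (not part of this commit): mirrors how scripts/migrate.py constructs Environment.
import os
from chaind import Environment

env = Environment(domain='evm', env=os.environ)  # 'evm' is an assumed example domain
print(env.config_dir)             # e.g. ~/.config/chaind/evm, unless CONFINI_DIR overrides it
print(env.data_dir)               # e.g. ~/.local/share/chaind/evm
print(env.session_runtime_dir)    # runtime dir joined with the generated session uuid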
6  requirements.txt  Normal file
@@ -0,0 +1,6 @@
chainlib>0.0.2,<=0.0.3
chainqueue>0.0.1,<=0.0.2
chainsyncer>0.0.1,<=0.0.2
confini>0.3.5,<=0.3.6
pyxdg~=0.26
hexathon~=0.0.1a7
82  scripts/migrate.py  Normal file
@@ -0,0 +1,82 @@
#!/usr/bin/python
import os
import argparse
import logging

# external imports
import alembic
from alembic.config import Config as AlembicConfig
import confini
from xdg.BaseDirectory import (
    xdg_data_dirs,
    load_first_config,
)
import chainqueue.db
import chainsyncer.db

# local imports
from chaind import Environment

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'chaind', 'db')
default_migrations_dir = os.path.join(dbdir, 'migrations')

env = Environment(env=os.environ)

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=env.config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--data-dir', dest='data_dir', type=str, default=env.data_dir, help='data directory')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=default_migrations_dir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='reset existing database')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

# process config
config = confini.Config(args.c, args.env_prefix)
config.process()
args_override = {
    'SESSION_DATA_DIR': getattr(args, 'data_dir'),
}
config.dict_override(args_override, 'cli args')

if config.get('DATABASE_ENGINE') == 'sqlite':
    config.add(os.path.join(config.get('SESSION_DATA_DIR'), config.get('DATABASE_NAME')), 'DATABASE_NAME', True)

config.censor('PASSWORD', 'DATABASE')

logg.debug('config loaded:\n{}'.format(config))

config.add(os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE')), '_MIGRATIONS_DIR', True)
if not os.path.isdir(config.get('_MIGRATIONS_DIR')):
    logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
    config.add(os.path.join(args.migrations_dir, 'default'), '_MIGRATIONS_DIR', True)

os.makedirs(config.get('SESSION_DATA_DIR'), exist_ok=True)

dsn = chainqueue.db.dsn_from_config(config)


def main():
    logg.info('using migrations dir {}'.format(config.get('_MIGRATIONS_DIR')))
    logg.info('using db {}'.format(dsn))
    ac = AlembicConfig(os.path.join(config.get('_MIGRATIONS_DIR'), 'alembic.ini'))
    ac.set_main_option('sqlalchemy.url', dsn)
    ac.set_main_option('script_location', config.get('_MIGRATIONS_DIR'))

    if args.reset:
        logg.debug('reset is set, purging existing content')
        alembic.command.downgrade(ac, 'base')

    alembic.command.upgrade(ac, 'head')


if __name__ == '__main__':
    main()
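The migrations directory resolution above expects one subdirectory per DATABASE_ENGINE and falls back to default/ when no matching directory exists. A sketch of the layout this implies (the postgres/ entry is hypothetical; this commit only ships default/):

chaind/db/migrations/
    default/     # shipped in this commit; fallback for any engine without its own directory
    postgres/    # hypothetical engine-specific set, picked when DATABASE_ENGINE=postgres and the directory exists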
28  setup.cfg  Normal file
@@ -0,0 +1,28 @@
[metadata]
name = chaind
version = 0.0.1a1
description = Base package for chain queue services
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/chaintools/chainqueue
keywords =
    blockchain
    cryptocurrency
    p2p
classifiers =
    Programming Language :: Python :: 3
    Operating System :: OS Independent
    Development Status :: 3 - Alpha
    Environment :: Console
    Intended Audience :: Developers
    License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
    Topic :: Internet
# Topic :: Blockchain :: EVM
license = GPL3
license_files =
    LICENSE.txt

[options]
python_requires = >= 3.6
packages =
    chaind
36  setup.py  Normal file
@@ -0,0 +1,36 @@
from setuptools import setup
import configparser
import os


requirements = []
f = open('requirements.txt', 'r')
while True:
    l = f.readline()
    if l == '':
        break
    requirements.append(l.rstrip())
f.close()

test_requirements = []
f = open('test_requirements.txt', 'r')
while True:
    l = f.readline()
    if l == '':
        break
    test_requirements.append(l.rstrip())
f.close()

postgres_requirements = [
    'psycopg2==2.8.6',
] + requirements
sqlite_requirements = [
] + requirements
setup(
    install_requires=requirements,
    tests_require=test_requirements,
    extras_require={
        'postgres': postgres_requirements,
        'sqlite': sqlite_requirements,
    }
)
0  test_requirements.txt  Normal file