Initial commit
commit 6b044f18db
.gitignore (vendored), Normal file, 2 lines
@@ -0,0 +1,2 @@
__pycache__
*.pyc
chainqueue/db/__init__.py, Normal file, 57 lines
@@ -0,0 +1,57 @@
# standard imports
import os
import logging

# local imports
from chainqueue.db.models.base import SessionBase


logg = logging.getLogger().getChild(__name__)


# an Engine, which the Session will use for connection
# resources


def dsn_from_config(config):
    """Generate a dsn string from the provided config dict.

    The config dict must include all well-known database connection parameters, and must implement the method "get(key)" to retrieve them. Any missing parameters will be rendered as the literal string "None".

    :param config: Configuration object
    :type config: Varies
    :returns: dsn string
    :rtype: str
    """
    scheme = config.get('DATABASE_ENGINE')
    if config.get('DATABASE_DRIVER') != None:
        scheme += '+{}'.format(config.get('DATABASE_DRIVER'))

    dsn = ''
    dsn_out = ''
    if config.get('DATABASE_ENGINE') == 'sqlite':
        dsn = '{}:///{}'.format(
                scheme,
                config.get('DATABASE_NAME'),
            )
        dsn_out = dsn

    else:
        dsn = '{}://{}:{}@{}:{}/{}'.format(
                scheme,
                config.get('DATABASE_USER'),
                config.get('DATABASE_PASSWORD'),
                config.get('DATABASE_HOST'),
                config.get('DATABASE_PORT'),
                config.get('DATABASE_NAME'),
            )
        dsn_out = '{}://{}:{}@{}:{}/{}'.format(
                scheme,
                config.get('DATABASE_USER'),
                '***',
                config.get('DATABASE_HOST'),
                config.get('DATABASE_PORT'),
                config.get('DATABASE_NAME'),
            )
    logg.debug('parsed dsn from config: {}'.format(dsn_out))
    return dsn
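For reference, a minimal sketch of how dsn_from_config above might be driven with a plain dict; the keys mirror the DATABASE_* parameters it reads, and the sqlite filename is just an illustrative value (the same configuration shape appears in tests/base.py below):

# a minimal sketch, assuming a plain dict satisfies the "get(key)" contract
from chainqueue.db import dsn_from_config

config = {
    'DATABASE_ENGINE': 'sqlite',
    'DATABASE_DRIVER': 'pysqlite',
    'DATABASE_NAME': 'chainqueue.sqlite',  # example filename
}
dsn = dsn_from_config(config)
# dsn is now 'sqlite+pysqlite:///chainqueue.sqlite'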
chainqueue/db/migrations/default/README, Normal file, 1 line
@@ -0,0 +1 @@
Generic single-database configuration.
chainqueue/db/migrations/default/alembic.ini, Normal file, 85 lines
@@ -0,0 +1,85 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
chainqueue/db/migrations/default/env.py, Normal file, 77 lines
@@ -0,0 +1,77 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
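For reference, a minimal sketch of driving these migrations from Python, assuming the alembic.ini above and a sqlite DSN; the same pattern appears in tests/base.py below, and passing sql=True selects the offline path instead of connecting:

# a minimal sketch, assuming this migrations directory and a sqlite DSN
import alembic.command
import alembic.config

migrationsdir = 'chainqueue/db/migrations/default'
cfg = alembic.config.Config(migrationsdir + '/alembic.ini')
cfg.set_main_option('script_location', migrationsdir)
cfg.set_main_option('sqlalchemy.url', 'sqlite+pysqlite:///chainqueue.sqlite')

# online mode: run_migrations_online() connects and applies the revisions
alembic.command.upgrade(cfg, 'head')

# offline mode: run_migrations_offline() emits the migration SQL to stdout
# without needing a DBAPI connection
alembic.command.upgrade(cfg, 'head', sql=True)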
chainqueue/db/migrations/default/gmon.out, Normal file, BIN
Binary file not shown.
chainqueue/db/migrations/default/script.py.mako, Normal file, 24 lines
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,38 @@
"""Transaction cache

Revision ID: 2215c497248b
Revises: c537a0fd8466
Create Date: 2021-04-02 10:09:11.923949

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '2215c497248b'
down_revision = 'c537a0fd8466'
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        'tx_cache',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
        sa.Column('date_created', sa.DateTime, nullable=False),
        sa.Column('date_updated', sa.DateTime, nullable=False),
        sa.Column('source_token_address', sa.String(42), nullable=False),
        sa.Column('destination_token_address', sa.String(42), nullable=False),
        sa.Column('sender', sa.String(42), nullable=False),
        sa.Column('recipient', sa.String(42), nullable=False),
        sa.Column('from_value', sa.NUMERIC(), nullable=False),
        sa.Column('to_value', sa.NUMERIC(), nullable=True),
        sa.Column('block_number', sa.BIGINT(), nullable=True),
        sa.Column('tx_index', sa.Integer, nullable=True),
    )


def downgrade():
    op.drop_table('tx_cache')
@@ -0,0 +1,30 @@
"""Otx state history

Revision ID: 3e43847c0717
Revises: 2215c497248b
Create Date: 2021-04-02 10:10:58.656139

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '3e43847c0717'
down_revision = '2215c497248b'
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        'otx_state_log',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False),
        sa.Column('date', sa.DateTime, nullable=False),
        sa.Column('status', sa.Integer, nullable=False),
    )


def downgrade():
    op.drop_table('otx_state_log')
@@ -0,0 +1,35 @@
"""Outgoing queue

Revision ID: c537a0fd8466
Revises:
Create Date: 2021-04-02 10:04:27.092819

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'c537a0fd8466'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        'otx',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('date_created', sa.DateTime, nullable=False),
        sa.Column('nonce', sa.Integer, nullable=False),
        sa.Column('tx_hash', sa.Text, nullable=False),
        sa.Column('signed_tx', sa.Text, nullable=False),
        sa.Column('status', sa.Integer, nullable=False, default=0),
        sa.Column('block', sa.Integer),
    )
    op.create_index('idx_otx_tx', 'otx', ['tx_hash'], unique=True)


def downgrade():
    op.drop_index('idx_otx_tx')
    op.drop_table('otx')
chainqueue/db/models/base.py, Normal file, 121 lines
@@ -0,0 +1,121 @@
# standard imports
import logging

# third-party imports
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import (
        StaticPool,
        QueuePool,
        AssertionPool,
        )

logg = logging.getLogger()

Model = declarative_base(name='Model')


class SessionBase(Model):
    """The base object for all SQLAlchemy enabled models. All other models must extend this.
    """
    __abstract__ = True

    id = Column(Integer, primary_key=True)

    engine = None
    """Database connection engine of the running application"""
    sessionmaker = None
    """Factory object responsible for creating sessions from the connection pool"""
    transactional = True
    """Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
    poolable = True
    """Whether the database backend supports connection pools. Should be explicitly set by initialization code"""
    procedural = True
    """Whether the database backend supports stored procedures"""
    localsessions = {}
    """Contains dictionary of sessions initiated by db model components"""


    @staticmethod
    def create_session():
        """Creates a new database session.
        """
        return SessionBase.sessionmaker()


    @staticmethod
    def _set_engine(engine):
        """Sets the database engine static property
        """
        SessionBase.engine = engine
        SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)


    @staticmethod
    def connect(dsn, pool_size=16, debug=False):
        """Create new database connection engine and connect to database backend.

        :param dsn: DSN string defining connection.
        :type dsn: str
        """
        e = None
        if SessionBase.poolable:
            poolclass = QueuePool
            if pool_size > 1:
                e = create_engine(
                        dsn,
                        max_overflow=pool_size*3,
                        pool_pre_ping=True,
                        pool_size=pool_size,
                        pool_recycle=60,
                        poolclass=poolclass,
                        echo=debug,
                        )
            else:
                if debug:
                    poolclass = AssertionPool
                else:
                    poolclass = StaticPool

                e = create_engine(
                        dsn,
                        poolclass=poolclass,
                        echo=debug,
                        )
        else:
            e = create_engine(
                    dsn,
                    echo=debug,
                    )

        SessionBase._set_engine(e)


    @staticmethod
    def disconnect():
        """Disconnect from database and free resources.
        """
        SessionBase.engine.dispose()
        SessionBase.engine = None


    @staticmethod
    def bind_session(session=None):
        localsession = session
        if localsession == None:
            localsession = SessionBase.create_session()
            localsession_key = str(id(localsession))
            logg.debug('creating new session {}'.format(localsession_key))
            SessionBase.localsessions[localsession_key] = localsession
        return localsession


    @staticmethod
    def release_session(session=None):
        session_key = str(id(session))
        if SessionBase.localsessions.get(session_key) != None:
            logg.debug('commit and destroy session {}'.format(session_key))
            session.commit()
            session.close()
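For reference, a minimal sketch of the intended lifecycle of SessionBase, assuming the sqlite configuration used in tests/base.py below:

# a minimal sketch: connect once, then create and release sessions as needed
from chainqueue.db import dsn_from_config
from chainqueue.db.models.base import SessionBase

dsn = dsn_from_config({
    'DATABASE_ENGINE': 'sqlite',
    'DATABASE_DRIVER': 'pysqlite',
    'DATABASE_NAME': 'chainqueue.sqlite',
})
SessionBase.poolable = False  # sqlite backend: skip the connection pool
SessionBase.connect(dsn, debug=False)

session = SessionBase.create_session()
# ... query or add model instances here ...
session.commit()
session.close()
SessionBase.disconnect()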
requirements.txt, Normal file, 4 lines
@@ -0,0 +1,4 @@
pysha3==1.0.2
hexathon~=0.0.1a7
alembic==1.4.2
SQLAlchemy==1.3.20
setup.cfg, Normal file, 33 lines
@@ -0,0 +1,33 @@
[metadata]
name = chainqueue
version = 0.0.1a1
description = Generic blockchain transaction queue control
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/nolash/chainlib
keywords =
  cic
  cryptocurrency
  ethereum
  solidarity
  mutual_credit
classifiers =
  Programming Language :: Python :: 3
  Operating System :: OS Independent
  Development Status :: 3 - Alpha
  Environment :: Console
  Intended Audience :: Developers
  License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
  Topic :: Internet
  # Topic :: Blockchain :: EVM
license = GPL3
licence_files =
  LICENSE.txt

[options]
python_requires = >= 3.6
packages =
  chainqueue

#[options.entry_points]
#console_scripts =
setup.py, Normal file, 36 lines
@@ -0,0 +1,36 @@
from setuptools import setup
import configparser
import os


requirements = []
f = open('requirements.txt', 'r')
while True:
    l = f.readline()
    if l == '':
        break
    requirements.append(l.rstrip())
f.close()

test_requirements = []
f = open('test_requirements.txt', 'r')
while True:
    l = f.readline()
    if l == '':
        break
    test_requirements.append(l.rstrip())
f.close()

postgres_requirements = [
        'psycopg2==2.8.6',
        ] + requirements
sqlite_requirements = [
        ] + requirements
setup(
        install_requires=requirements,
        tests_require=test_requirements,
        extras_require={
            'postgres': postgres_requirements,
            'sqlite': sqlite_requirements,
            }
        )
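For reference, the extras_require entries declared here would typically be selected at install time with pip's standard extras syntax, for example pip install .[postgres] to pull in psycopg2, or pip install .[sqlite] for the plain sqlite setup.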
tests/base.py, Normal file, 83 lines
@@ -0,0 +1,83 @@
# standard imports
import logging
import unittest
import tempfile
import os
#import pysqlite

# external imports
from chainlib.chain import ChainSpec
import alembic
import alembic.config

# local imports
from chainqueue.db import dsn_from_config
from chainqueue.db.models.base import SessionBase

script_dir = os.path.realpath(os.path.dirname(__file__))

logg = logging.getLogger().getChild(__name__)


class TestBase(unittest.TestCase):

    def setUp(self):
        rootdir = os.path.dirname(os.path.dirname(__file__))
        dbdir = os.path.join(rootdir, 'chainqueue', 'db')
        #migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
        #if not os.path.isdir(migrationsdir):
        migrationsdir = os.path.join(dbdir, 'migrations', 'default')
        logg.info('using migrations directory {}'.format(migrationsdir))

        # db_dir = tempfile.mkdtemp()
        # self.db_path = os.path.join(db_dir, 'test.sqlite')
        # config = {
        #     'DATABASE_ENGINE': 'sqlite',
        #     'DATABASE_DRIVER': 'pysqlite',
        #     'DATABASE_NAME': self.db_path,
        #     }

        config = {
            'DATABASE_ENGINE': 'sqlite',
            'DATABASE_DRIVER': 'pysqlite',
            'DATABASE_NAME': 'chainqueue.sqlite',
            }
        logg.debug('config {}'.format(config))

        dsn = dsn_from_config(config)
        SessionBase.poolable = False
        SessionBase.transactional = False
        SessionBase.procedural = False
        SessionBase.connect(dsn, debug=False)

        ac = alembic.config.Config(os.path.join(migrationsdir, 'alembic.ini'))
        ac.set_main_option('sqlalchemy.url', dsn)
        ac.set_main_option('script_location', migrationsdir)

        alembic.command.downgrade(ac, 'base')
        alembic.command.upgrade(ac, 'head')


        self.session = SessionBase.create_session()
        #
        # f = open(os.path.join(script_dir, '..', 'sql', 'sqlite', '1.sql'), 'r')
        # sql = f.read()
        # f.close()
        #
        # conn = SessionBase.engine.connect()
        # conn.execute(sql)
        #
        # f = open(os.path.join(script_dir, '..', 'sql', 'sqlite', '2.sql'), 'r')
        # sql = f.read()
        # f.close()
        #
        # conn = SessionBase.engine.connect()
        # conn.execute(sql)
        #
        self.chain_spec = ChainSpec('evm', 'foo', 42, 'bar')


    def tearDown(self):
        self.session.commit()
        self.session.close()
tests/test_basic.py, Normal file, 19 lines
@@ -0,0 +1,19 @@
# standard imports
import logging
import unittest

# test imports
from tests.base import TestBase

logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()


class TestBasic(TestBase):

    def test_hello(self):
        logg.debug('foo')
        pass


if __name__ == '__main__':
    unittest.main()