adding cic-eth as sub dir
This commit is contained in:
parent
ed3991e997
commit
a4587deac5
7
apps/cic-eth/.coveragerc
Normal file
7
apps/cic-eth/.coveragerc
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
[report]
|
||||||
|
omit =
|
||||||
|
.venv/*
|
||||||
|
scripts/*
|
||||||
|
cic_eth/db/migrations/*
|
||||||
|
cic_eth/sync/head.py
|
||||||
|
cic_eth/sync/mempool.py
|
12
apps/cic-eth/.envrc_dev_example
Normal file
12
apps/cic-eth/.envrc_dev_example
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
set -a
|
||||||
|
CICTEST_DEV_KEYS_PATH=
|
||||||
|
CICTEST_DEV_SIGNER_PASSWORD=
|
||||||
|
|
||||||
|
CICTEST_SIGNER_SECRET=deadbeef
|
||||||
|
CICTEST_SIGNER_DATABASE=signer_test
|
||||||
|
|
||||||
|
CICTEST_PIP_EXTRA_INDEX_HOST=
|
||||||
|
CICTEST_PIP_EXTRA_INDEX_PORT=
|
||||||
|
CICTEST_PIP_EXTRA_INDEX_PATH=
|
||||||
|
CICTEST_PIP_EXTRA_INDEX_PROTO=
|
||||||
|
set +a
|
21
apps/cic-eth/.envrc_example
Normal file
21
apps/cic-eth/.envrc_example
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
set -a
|
||||||
|
CICTEST_BANCOR_DIR=
|
||||||
|
CICTEST_BANCOR_REGISTRY_ADDRESS=
|
||||||
|
|
||||||
|
CICTEST_ETH_WS_PROVIDER='ws://localhost:8546'
|
||||||
|
CICTEST_ETH_HTTP_PROVIDER='http://localhost:8545'
|
||||||
|
CICTEST_ETH_PROVIDER=$CIC_ETH_HTTP_PROVIDER
|
||||||
|
CICTEST_ETH_CHAIN_ID=8995
|
||||||
|
CICTEST_ETH_GAS_PROVIDER_ADDRESS=
|
||||||
|
|
||||||
|
CICTEST_DATABASE_ENGINE=postgresql
|
||||||
|
CICTEST_DATABASE_DRIVER=psycopg2
|
||||||
|
CICTEST_DATABASE_NAME=cic-eth-test
|
||||||
|
CICTEST_DATABASE_USER=postgres
|
||||||
|
CICTEST_DATABASE_PASSWORD=
|
||||||
|
CICTEST_DATABASE_HOST=localhost
|
||||||
|
CICTEST_DATABASE_PORT=5432
|
||||||
|
|
||||||
|
CICTEST_CELERY_BROKER_URL=
|
||||||
|
set +a
|
||||||
|
. .envrc_dev_example
|
8
apps/cic-eth/.gitignore
vendored
Normal file
8
apps/cic-eth/.gitignore
vendored
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
.envrc
|
||||||
|
.envrc_dev
|
||||||
|
.venv
|
||||||
|
__pycache__
|
||||||
|
*.pyc
|
||||||
|
_build
|
||||||
|
doc/**/*.png
|
||||||
|
doc/**/html
|
11
apps/cic-eth/.gitlab-ci.yml
Normal file
11
apps/cic-eth/.gitlab-ci.yml
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
.contract-migration-changes-target:
|
||||||
|
rules:
|
||||||
|
- changes:
|
||||||
|
- $CONTEXT/*
|
||||||
|
|
||||||
|
build-cic-eth:
|
||||||
|
extends:
|
||||||
|
- .contract-migration-changes-target
|
||||||
|
- .py-build
|
||||||
|
variables:
|
||||||
|
CONTEXT: apps/cic-eth
|
114
apps/cic-eth/CHANGELOG
Normal file
114
apps/cic-eth/CHANGELOG
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
- 0.10.0
|
||||||
|
* Move api to subpackage
|
||||||
|
* Add version identifier to package top level
|
||||||
|
* Add gas price proxy middleware
|
||||||
|
* Make initial queue send status inactive, introduce new status READYSEND for initial send attempt
|
||||||
|
* Add maintenance mode locks, and automatic account lock when node rejects to send a tx to network
|
||||||
|
* Add nonce repair tool
|
||||||
|
* Add tx resend admin api
|
||||||
|
* Add pure tcp and redis task api callbacks
|
||||||
|
* Add optional outgoing log status tracing
|
||||||
|
* Add lock lister and lock/unlock cli tool
|
||||||
|
- 0.9.0
|
||||||
|
* Require chain spec parameter in api
|
||||||
|
* Pass chain spec between tasks
|
||||||
|
- 0.8.9
|
||||||
|
* Change executable argument flag names for contract keys and chain spec
|
||||||
|
* Take over auxiliary code from cic-registry
|
||||||
|
* Use cic-bancor package for bancor related code
|
||||||
|
- 0.8.8
|
||||||
|
* Implement transaction granularity for syncers
|
||||||
|
- 0.8.7
|
||||||
|
* Add registration option on api account creation
|
||||||
|
- 0.8.6
|
||||||
|
* Add persistent account role index
|
||||||
|
- 0.8.5
|
||||||
|
* Add refill gas api call
|
||||||
|
- 0.8.4
|
||||||
|
* Add settable abi dir to executables
|
||||||
|
- 0.8.3
|
||||||
|
* Add faucet task, tests
|
||||||
|
* Add retry tx on incoming gas refill trigger
|
||||||
|
* Add transfer approval task, tests
|
||||||
|
- 0.8.2
|
||||||
|
* Upgrade cic-registry
|
||||||
|
* Fix broken history syncer test
|
||||||
|
- 0.8.1
|
||||||
|
* Upgrade dependencies
|
||||||
|
- 0.8.0
|
||||||
|
* Rehabilitate tasker script
|
||||||
|
* Move tasker and manager scripts to console scripts setup entrypoints
|
||||||
|
- 0.7.1
|
||||||
|
* Make tx hash optional in check-gas-and-send signature builder
|
||||||
|
- 0.7.0
|
||||||
|
* Define callback tasks in API with qualified celery task strings
|
||||||
|
- 0.6.4
|
||||||
|
* Add mock tx cache data generator
|
||||||
|
* Change sql tx cache value fields to bigint
|
||||||
|
- 0.6.3
|
||||||
|
* Remove CIC-cache related code
|
||||||
|
- 0.6.2
|
||||||
|
* Rename tx_number member of TxCache object to tx_index
|
||||||
|
- 0.6.1
|
||||||
|
* Add postgres-only stored procedures for per-address balances and transactions queries
|
||||||
|
- 0.6.0
|
||||||
|
* Configurable celery in worker script
|
||||||
|
- 0.5.8
|
||||||
|
* Upgrade crypto-dev-signer (now uses SQLAlchemy)
|
||||||
|
* Use sql backend db for tests
|
||||||
|
* Use filesystem backend for celery tests
|
||||||
|
- 0.5.7
|
||||||
|
* Remove environment variable dependencies in tests
|
||||||
|
* Update dependencies
|
||||||
|
- 0.5.6
|
||||||
|
* Upgrade signer
|
||||||
|
- 0.5.5
|
||||||
|
* Implement env-prefix switch in scripts
|
||||||
|
- 0.5.4
|
||||||
|
* Add ipc path parameter to web3ext instantiation
|
||||||
|
- 0.5.3
|
||||||
|
* Add alembic migration script customizable with confini
|
||||||
|
- 0.5.2
|
||||||
|
* Upgrade crypto-dev-signer
|
||||||
|
- 0.5.1
|
||||||
|
* Move tasker back to scripts
|
||||||
|
- 0.5.0
|
||||||
|
* Create Api object, persists callback and queue across methods
|
||||||
|
- 0.4.1
|
||||||
|
* Add docstrings
|
||||||
|
* Rehabilitate tests
|
||||||
|
* Correctly scope db sessions in tasks
|
||||||
|
* Add API for create account
|
||||||
|
- 0.4.0
|
||||||
|
* Use config files instead of environment for cic-dev-tasker
|
||||||
|
- 0.3.0
|
||||||
|
* Add queues
|
||||||
|
- 0.2.0
|
||||||
|
* Add documentation
|
||||||
|
* Rename cic-dev-syncer to cic-dev-manager
|
||||||
|
* Rename cic-dev-track to cic-dev-tracker
|
||||||
|
- 0.1.1
|
||||||
|
* Remove cic_eth.eth from packaging
|
||||||
|
- 0.1.0
|
||||||
|
* Extract tx factory object controlling nonce and gasprice
|
||||||
|
* Fix bug from adapting method call to EthereumTester
|
||||||
|
* Package api for external use
|
||||||
|
* Add missing otx saves for transactions
|
||||||
|
- 0.0.6
|
||||||
|
* Add missing "approve zero" for convert in monitor queue
|
||||||
|
- 0.0.5
|
||||||
|
* Add customized eth_tester replacement for active web3 backend
|
||||||
|
- 0.0.4
|
||||||
|
* Add api caller callback for completed tasks
|
||||||
|
* Add semaphore for web3 calls
|
||||||
|
* Add missing subtasks in bancor convert chain
|
||||||
|
- 0.0.3
|
||||||
|
* Separate out registry and sempo to external packages
|
||||||
|
- 0.0.2
|
||||||
|
* Add erc20 transfer task
|
||||||
|
* Add transaction queue tasker and database
|
||||||
|
- 0.0.1
|
||||||
|
* Add registry
|
||||||
|
* Add bancor token lookup and convert tasks
|
||||||
|
* Add sempo token register tasks
|
||||||
|
* Add environment variable docs
|
674
apps/cic-eth/LICENSE.txt
Normal file
674
apps/cic-eth/LICENSE.txt
Normal file
@ -0,0 +1,674 @@
|
|||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users. We, the Free Software Foundation, use the
|
||||||
|
GNU General Public License for most of our software; it applies also to
|
||||||
|
any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you
|
||||||
|
these rights or asking you to surrender the rights. Therefore, you have
|
||||||
|
certain responsibilities if you distribute copies of the software, or if
|
||||||
|
you modify it: responsibilities to respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether
|
||||||
|
gratis or for a fee, you must pass on to the recipients the same
|
||||||
|
freedoms that you received. You must make sure that they, too, receive
|
||||||
|
or can get the source code. And you must show them these terms so they
|
||||||
|
know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps:
|
||||||
|
(1) assert copyright on the software, and (2) offer you this License
|
||||||
|
giving you legal permission to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains
|
||||||
|
that there is no warranty for this free software. For both users' and
|
||||||
|
authors' sake, the GPL requires that modified versions be marked as
|
||||||
|
changed, so that their problems will not be attributed erroneously to
|
||||||
|
authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run
|
||||||
|
modified versions of the software inside them, although the manufacturer
|
||||||
|
can do so. This is fundamentally incompatible with the aim of
|
||||||
|
protecting users' freedom to change the software. The systematic
|
||||||
|
pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we
|
||||||
|
have designed this version of the GPL to prohibit the practice for those
|
||||||
|
products. If such problems arise substantially in other domains, we
|
||||||
|
stand ready to extend this provision to those domains in future versions
|
||||||
|
of the GPL, as needed to protect the freedom of users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents.
|
||||||
|
States should not allow patents to restrict development and use of
|
||||||
|
software on general-purpose computers, but in those that do, we wish to
|
||||||
|
avoid the special danger that patents applied to a free program could
|
||||||
|
make it effectively proprietary. To prevent this, the GPL assures that
|
||||||
|
patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Use with the GNU Affero General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU Affero General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the special requirements of the GNU Affero General Public License,
|
||||||
|
section 13, concerning interaction through a network will apply to the
|
||||||
|
combination as such.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU General Public License from time to time. Such new versions will
|
||||||
|
be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If the program does terminal interaction, make it output a short
|
||||||
|
notice like this when it starts in an interactive mode:
|
||||||
|
|
||||||
|
<program> Copyright (C) <year> <name of author>
|
||||||
|
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||||
|
This is free software, and you are welcome to redistribute it
|
||||||
|
under certain conditions; type `show c' for details.
|
||||||
|
|
||||||
|
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||||
|
parts of the General Public License. Of course, your program's commands
|
||||||
|
might be different; for a GUI interface, you would use an "about box".
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU GPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
The GNU General Public License does not permit incorporating your program
|
||||||
|
into proprietary programs. If your program is a subroutine library, you
|
||||||
|
may consider it more useful to permit linking proprietary applications with
|
||||||
|
the library. If this is what you want to do, use the GNU Lesser General
|
||||||
|
Public License instead of this License. But first, please read
|
||||||
|
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
10
apps/cic-eth/cic_eth/__init__.py
Normal file
10
apps/cic-eth/cic_eth/__init__.py
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
"""Celery tasks API for CIC EVM and web3 JSON-RPC interactions
|
||||||
|
|
||||||
|
.. moduleauthor: Louis Holbrook <dev@holbrook.no>
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .version import (
|
||||||
|
version,
|
||||||
|
version_string,
|
||||||
|
)
|
125
apps/cic-eth/cic_eth/admin/ctrl.py
Normal file
125
apps/cic-eth/cic_eth/admin/ctrl.py
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
# standard imports
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
from cic_registry import zero_address
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
from cic_eth.db.models.lock import Lock
|
||||||
|
from cic_eth.error import LockedError
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
@celery_app.task()
def lock(chained_input, chain_str, address=zero_address, flags=LockEnum.ALL, tx_hash=None):
    """Task wrapper to set arbitrary locks

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param address: Ethereum address; the zero address applies the lock globally
    :type address: str, 0x-hex
    :param flags: Flags to set
    :type flags: number
    :param tx_hash: Transaction hash to associate with the lock, if any
    :type tx_hash: str, 0x-hex
    :returns: The unmodified chained_input (the resulting lock state is only logged)
    """
    r = Lock.set(chain_str, flags, address=address, tx_hash=tx_hash)
    logg.debug('Locked {} for {}, flag now {}'.format(flags, address, r))
    # Pass the chain value through untouched so this task can sit anywhere in a chain.
    return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def unlock(chained_input, chain_str, address=zero_address, flags=LockEnum.ALL):
    """Task wrapper to reset arbitrary locks

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param address: Ethereum address; the zero address resets the global lock
    :type address: str, 0x-hex
    :param flags: Flags to reset
    :type flags: number
    :returns: The unmodified chained_input (the resulting lock state is only logged)
    """
    r = Lock.reset(chain_str, flags, address=address)
    logg.debug('Unlocked {} for {}, flag now {}'.format(flags, address, r))
    # Pass the chain value through untouched so this task can sit anywhere in a chain.
    return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def lock_send(chained_input, chain_str, address=zero_address, tx_hash=None):
    """Task wrapper to set send lock

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param address: Ethereum address; the zero address applies the lock globally
    :type address: str, 0x-hex
    :param tx_hash: Transaction hash to associate with the lock, if any
    :type tx_hash: str, 0x-hex
    :returns: The unmodified chained_input (the resulting lock state is only logged)
    """
    r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
    logg.debug('Send locked for {}, flag now {}'.format(address, r))
    # Pass the chain value through untouched so this task can sit anywhere in a chain.
    return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def unlock_send(chained_input, chain_str, address=zero_address):
    """Task wrapper to reset send lock

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param address: Ethereum address; the zero address resets the global lock
    :type address: str, 0x-hex
    :returns: The unmodified chained_input (the resulting lock state is only logged)
    """
    r = Lock.reset(chain_str, LockEnum.SEND, address=address)
    logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
    # Pass the chain value through untouched so this task can sit anywhere in a chain.
    return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def lock_queue(chained_input, chain_str, address=zero_address, tx_hash=None):
    """Task wrapper to set queue direct lock

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param address: Ethereum address; the zero address applies the lock globally
    :type address: str, 0x-hex
    :param tx_hash: Transaction hash to associate with the lock, if any
    :type tx_hash: str, 0x-hex
    :returns: The unmodified chained_input (the resulting lock state is only logged)
    """
    r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
    logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
    # Pass the chain value through untouched so this task can sit anywhere in a chain.
    return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def unlock_queue(chained_input, chain_str, address=zero_address):
    """Task wrapper to reset queue direct lock

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param address: Ethereum address; the zero address resets the global lock
    :type address: str, 0x-hex
    :returns: The unmodified chained_input (the resulting lock state is only logged)
    """
    r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
    logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
    # Pass the chain value through untouched so this task can sit anywhere in a chain.
    return chained_input
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def check_lock(chained_input, chain_str, lock_flags, address=None):
    """Task wrapper to verify that none of the given lock flags are set.

    The global lock (zero address) is always checked; when an address is
    given, locks registered for that address are checked as well.

    :param chained_input: Passthrough value for use in celery task chains
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param lock_flags: Flags to check for
    :type lock_flags: number
    :param address: Ethereum address to additionally check, if any
    :type address: str, 0x-hex
    :raises LockedError: If any of the given flags are set globally or for the address
    :returns: The unmodified chained_input
    """
    # Global lock state applies regardless of the address argument.
    r = Lock.check(chain_str, lock_flags, address=zero_address)
    if address is not None:
        r |= Lock.check(chain_str, lock_flags, address=address)
    if r > 0:
        logg.debug('lock check {} has match {} for {}'.format(lock_flags, r, address))
        raise LockedError(r)
    return chained_input
|
12
apps/cic-eth/cic_eth/admin/debug.py
Normal file
12
apps/cic-eth/cic_eth/admin/debug.py
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
import datetime
|
||||||
|
|
||||||
|
import celery
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def out_tmp(tag, txt):
    """Write arbitrary text to a tagged debug file under /tmp.

    The file /tmp/err.<tag>.txt is created or overwritten.

    :param tag: Identifier used to construct the output filename
    :type tag: str
    :param txt: Text content to write
    :type txt: str
    """
    # Context manager guarantees the handle is closed even if write() raises.
    with open('/tmp/err.{}.txt'.format(tag), "w") as f:
        f.write(txt)
|
128
apps/cic-eth/cic_eth/admin/nonce.py
Normal file
128
apps/cic-eth/cic_eth/admin/nonce.py
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.db.models.otx import Otx
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.nonce import Nonce
|
||||||
|
from cic_eth.admin.ctrl import lock_send
|
||||||
|
from cic_eth.admin.ctrl import unlock_send
|
||||||
|
from cic_eth.admin.ctrl import lock_queue
|
||||||
|
from cic_eth.admin.ctrl import unlock_queue
|
||||||
|
from cic_eth.queue.tx import get_tx
|
||||||
|
from cic_eth.queue.tx import set_cancel
|
||||||
|
from cic_eth.queue.tx import create as queue_create
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
from cic_eth.eth.task import sign_tx
|
||||||
|
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
    """Shift all transactions with nonces higher than the offset by the provided position delta.

    Transactions that are replaced by transactions that move nonces will be marked as OVERRIDDEN.

    :param chain_str: Chain specification string representation
    :type chain_str: str
    :param tx_hash_orig_hex: Transaction hash to resolve to sender and nonce to use as shift offset
    :type tx_hash_orig_hex: str, 0x-hex
    :param delta: Amount of nonce positions to shift by
    :type delta: int
    """
    # resolve the queue this task was delivered on so follow-up tasks land on the same one
    queue = None
    try:
        queue = self.request.delivery_info.get('routing_key')
    except AttributeError:
        pass

    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_brief = get_tx(tx_hash_orig_hex)
    tx_raw = bytes.fromhex(tx_brief['signed_tx'][2:])
    tx = unpack_signed_raw_tx(tx_raw, chain_spec.chain_id())
    nonce = tx_brief['nonce']
    address = tx['from']

    logg.debug('shifting nonce {} position(s) for address {}, offset {}'.format(delta, address, nonce))

    # freeze the address while its queued transactions are being rewritten
    lock_queue(None, chain_str, address)
    lock_send(None, chain_str, address)

    session = SessionBase.create_session()
    q = session.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.sender==address)
    q = q.filter(Otx.nonce>=nonce+delta)
    q = q.order_by(Otx.nonce.asc())
    otxs = q.all()

    if len(otxs) == 0:
        # guard fix: with no matching transactions the loop below never runs, so
        # tx_new/tx_hash_hex were unbound (NameError) and the locks taken above
        # were never released. Release them and bail out instead.
        session.close()
        unlock_send(None, chain_str, address)
        unlock_queue(None, chain_str, address)
        return

    tx_hashes = []
    txs = []
    for otx in otxs:
        tx_raw = bytes.fromhex(otx.signed_tx[2:])
        tx_new = unpack_signed_raw_tx(tx_raw, chain_spec.chain_id())

        tx_previous_hash_hex = tx_new['hash']
        tx_previous_nonce = tx_new['nonce']

        # strip derived fields before re-signing with the shifted nonce
        del(tx_new['hash'])
        del(tx_new['hash_unsigned'])
        tx_new['nonce'] -= delta

        (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx_new, chain_str)
        logg.debug('tx {} -> {} nonce {} -> {}'.format(tx_previous_hash_hex, tx_hash_hex, tx_previous_nonce, tx_new['nonce']))

        otx = Otx(
            nonce=tx_new['nonce'],
            address=tx_new['from'],
            tx_hash=tx_hash_hex,
            signed_tx=tx_signed_raw_hex,
            )
        session.add(otx)
        session.commit()

        # TODO: cancel all first, then replace. Otherwise we risk two non-locked states for two different nonces.
        set_cancel(tx_previous_hash_hex, True)

        TxCache.clone(tx_previous_hash_hex, tx_hash_hex)

        tx_hashes.append(tx_hash_hex)
        txs.append(tx_signed_raw_hex)

    session.close()

    # tx_new here is the last transaction processed; its gas value is used for
    # the gas check, and its sender equals `address` by the sender filter above
    s = create_check_gas_and_send_task(
        txs,
        chain_str,
        address,
        tx_new['gas'],
        tx_hashes,
        queue,
        )

    s_unlock_send = celery.signature(
        'cic_eth.admin.ctrl.unlock_send',
        [
            chain_str,
            address,
        ],
        queue=queue,
        )
    s_unlock_direct = celery.signature(
        'cic_eth.admin.ctrl.unlock_queue',
        [
            chain_str,
            address,
        ],
        queue=queue,
        )
    # release both locks once the resubmission chain has been dispatched
    s_unlocks = celery.group(s_unlock_send, s_unlock_direct)
    s.link(s_unlocks)
    s.apply_async()
|
8
apps/cic-eth/cic_eth/api/__init__.py
Normal file
8
apps/cic-eth/cic_eth/api/__init__.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
"""Celery tasks API for CIC EVM and web3 JSON-RPC interactions
|
||||||
|
|
||||||
|
.. moduleauthor: Louis Holbrook <dev@holbrook.no>
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .api_task import Api
|
||||||
|
from .api_admin import AdminApi
|
445
apps/cic-eth/cic_eth/api/api_admin.py
Normal file
445
apps/cic-eth/cic_eth/api/api_admin.py
Normal file
@ -0,0 +1,445 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
import web3
|
||||||
|
from cic_registry import zero_address
|
||||||
|
from cic_registry import zero_content
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from crypto_dev_signer.eth.web3ext import Web3 as Web3Ext
|
||||||
|
from cic_registry.error import UnknownContractError
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.db.models.role import AccountRole
|
||||||
|
from cic_eth.db.models.otx import Otx
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.nonce import Nonce
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.error import InitializationError
|
||||||
|
from cic_eth.db.error import TxStateChangeError
|
||||||
|
from cic_eth.eth.rpc import RpcClient
|
||||||
|
from cic_eth.queue.tx import get_tx
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
|
||||||
|
app = celery.current_app
|
||||||
|
|
||||||
|
#logg = logging.getLogger(__file__)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class AdminApi:
    """Provides an interface to view and manipulate existing transaction tasks and system runtime settings.

    :param rpc_client: Rpc client to use for blockchain connections.
    :type rpc_client: cic_eth.eth.rpc.RpcClient
    :param queue: Name of worker queue to submit tasks to
    :type queue: str
    """
    def __init__(self, rpc_client, queue='cic-eth'):
        self.rpc_client = rpc_client
        self.w3 = rpc_client.w3
        self.queue = queue


    def unlock(self, chain_spec, address, flags=None):
        """Dispatch an unlock task for the given address.

        :param chain_spec: Chain spec of the chain context
        :param address: Ethereum address to unlock
        :param flags: Lock flags to clear (all if None — confirm against ctrl.unlock)
        :returns: Async result of the dispatched task
        """
        s_unlock = celery.signature(
            'cic_eth.admin.ctrl.unlock',
            [
                str(chain_spec),
                flags,
                address,
            ],
            queue=self.queue,
            )
        return s_unlock.apply_async()


    def lock(self, chain_spec, address, flags=None):
        """Dispatch a lock task for the given address.

        :param chain_spec: Chain spec of the chain context
        :param address: Ethereum address to lock
        :param flags: Lock flags to set (all if None — confirm against ctrl.lock)
        :returns: Async result of the dispatched task
        """
        s_lock = celery.signature(
            'cic_eth.admin.ctrl.lock',
            [
                str(chain_spec),
                flags,
                address,
            ],
            queue=self.queue,
            )
        return s_lock.apply_async()


    def get_lock(self):
        """Synchronously retrieve the current lock state from the queue backend."""
        s_lock = celery.signature(
            'cic_eth.queue.tx.get_lock',
            [],
            queue=self.queue,
            )
        return s_lock.apply_async().get()


    def tag_account(self, tag, address_hex):
        """Persistently associate an address with a plaintext tag.

        Some tags are known by the system and are used to resolve addresses to use for certain transactions.

        :param tag: Address tag
        :type tag: str
        :param address_hex: Ethereum address to tag
        :type address_hex: str, 0x-hex
        :raises ValueError: Invalid checksum address
        """
        if not web3.Web3.isChecksumAddress(address_hex):
            raise ValueError('invalid address')
        session = SessionBase.create_session()
        role = AccountRole.set(tag, address_hex)
        session.add(role)
        session.commit()
        session.close()


    def resend(self, tx_hash_hex, chain_str, in_place=True, unlock=False):
        """Resend a queued transaction, currently only by bumping its gas price in place.

        :param tx_hash_hex: Hash of transaction to resend
        :type tx_hash_hex: str, 0x-hex
        :param chain_str: Chain specification string representation
        :type chain_str: str
        :param in_place: Resend with same nonce at higher gas (only supported mode)
        :type in_place: bool
        :param unlock: Unlock send for the sender address after resend
        :type unlock: bool
        :raises TxStateChangeError: Transaction is already in a final state
        :raises NotImplementedError: in_place is False
        :returns: Async result of the dispatched task chain
        """
        logg.debug('resend {}'.format(tx_hash_hex))
        s_get_tx_cache = celery.signature(
            'cic_eth.queue.tx.get_tx_cache',
            [
                tx_hash_hex,
            ],
            queue=self.queue,
            )

        # TODO: This check should most likely be in resend task itself
        tx_dict = s_get_tx_cache.apply_async().get()
        if tx_dict['status'] in [StatusEnum.REVERTED, StatusEnum.SUCCESS, StatusEnum.CANCELLED, StatusEnum.OBSOLETED]:
            # fix: previously referenced the undefined name txold_hash_hex and
            # used a format string with no placeholder
            raise TxStateChangeError('Cannot resend mined or obsoleted transaction {}'.format(tx_hash_hex))

        s = None
        if in_place:
            s = celery.signature(
                'cic_eth.eth.tx.resend_with_higher_gas',
                [
                    tx_hash_hex,
                    chain_str,
                    None,
                    1.01,
                ],
                queue=self.queue,
                )
        else:
            raise NotImplementedError('resend as new not yet implemented')

        if unlock:
            s_gas = celery.signature(
                'cic_eth.admin.ctrl.unlock_send',
                [
                    chain_str,
                    tx_dict['sender'],
                ],
                queue=self.queue,
                )
            s.link(s_gas)

        return s.apply_async()


    def check_nonce(self, address):
        """Report queue and network nonce state for an address, flagging a blocking transaction if any.

        :param address: Ethereum address to check
        :type address: str, 0x-hex
        :returns: Nonce state (network/queue/blocking) and the blocking tx hash, if any
        :rtype: dict
        """
        s = celery.signature(
            'cic_eth.queue.tx.get_account_tx',
            [
                address,
                True,
                False,
            ],
            queue=self.queue,
            )
        txs = s.apply_async().get()

        blocking_tx = None
        blocking_nonce = None
        nonce_otx = 0
        for k in txs.keys():
            s_get_tx = celery.signature(
                'cic_eth.queue.tx.get_tx',
                [
                    k,
                ],
                queue=self.queue,
                )
            tx = s_get_tx.apply_async().get()
            logg.debug('checking nonce {}'.format(tx['nonce']))
            if tx['status'] in [StatusEnum.REJECTED, StatusEnum.FUBAR]:
                blocking_tx = k
                blocking_nonce = tx['nonce']
            # nonce_otx ends up as the last inspected queue nonce
            nonce_otx = tx['nonce']

        nonce_w3 = self.w3.eth.getTransactionCount(address, 'pending')

        return {
            'nonce': {
                'network': nonce_w3,
                'queue': nonce_otx,
                'blocking': blocking_nonce,
                },
            'tx': {
                'blocking': blocking_tx,
                },
            }


    def fix_nonce(self, address, nonce):
        """Shift queued nonces for an address, using the transaction at the given nonce as offset.

        :param address: Ethereum address whose queue should be repaired
        :type address: str, 0x-hex
        :param nonce: Nonce identifying the offset transaction
        :type nonce: int
        :returns: Async result of the dispatched shift_nonce task
        """
        s = celery.signature(
            'cic_eth.queue.tx.get_account_tx',
            [
                address,
                True,
                False,
            ],
            queue=self.queue,
            )
        txs = s.apply_async().get()

        tx_hash_hex = None
        for k in txs.keys():
            tx_dict = get_tx(k)
            if tx_dict['nonce'] == nonce:
                tx_hash_hex = k

        s_nonce = celery.signature(
            'cic_eth.admin.nonce.shift_nonce',
            [
                str(self.rpc_client.chain_spec),
                tx_hash_hex,
            ],
            queue=self.queue
            )
        return s_nonce.apply_async()


    # TODO: this is a stub, complete all checks
    def ready(self):
        """Checks whether all required initializations have been performed.

        :raises cic_eth.error.InitializationError: At least one setting pre-requisite has not been met.
        :raises KeyError: An address provided for initialization is not known by the keystore.
        """
        addr = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS')
        if addr == zero_address:
            raise InitializationError('missing account ETH_GAS_PROVIDER_ADDRESS')

        # signing a throwaway message proves the keystore knows the address
        self.w3.eth.sign(addr, text='666f6f')


    def account(self, chain_spec, address, cols=['tx_hash', 'sender', 'recipient', 'nonce', 'block', 'tx_index', 'status', 'network_status', 'date_created'], include_sender=True, include_recipient=True):
        """Lists locally originated transactions for the given Ethereum address.

        Performs a synchronous call to the Celery task responsible for performing the query.

        :param address: Ethereum address to return transactions for
        :type address: str, 0x-hex
        :param cols: Data columns to include (NOTE: currently unused by the implementation)
        :type cols: list of str
        :param include_sender: Include transactions sent by the address
        :type include_sender: bool
        :param include_recipient: Include transactions received by the address
        :type include_recipient: bool
        :returns: Transaction summaries
        :rtype: list of dict
        """
        s = celery.signature(
            'cic_eth.queue.tx.get_account_tx',
            [address],
            queue=self.queue,
            )
        txs = s.apply_async().get()

        tx_dict_list = []
        for tx_hash in txs.keys():
            s = celery.signature(
                'cic_eth.queue.tx.get_tx_cache',
                [tx_hash],
                queue=self.queue,
                )
            tx_dict = s.apply_async().get()
            if tx_dict['sender'] == address and not include_sender:
                logg.debug('skipping sender tx {}'.format(tx_dict['tx_hash']))
                continue
            elif tx_dict['recipient'] == address and not include_recipient:
                logg.debug('skipping recipient tx {}'.format(tx_dict['tx_hash']))
                continue

            logg.debug(tx_dict)
            o = {
                'nonce': tx_dict['nonce'],
                'tx_hash': tx_dict['tx_hash'],
                'status': tx_dict['status'],
                'date_updated': tx_dict['date_updated'],
                }
            tx_dict_list.append(o)

        return tx_dict_list


    def _account_description(self, chain_spec, c, address):
        """Resolve a human-readable description for an address.

        Contract addresses resolve to a registry identifier, custodial accounts
        to their role (if any); otherwise a generic fallback is returned.

        :param chain_spec: Chain spec of the chain context
        :param c: Rpc client providing the web3 connection
        :param address: Ethereum address to describe
        :type address: str, 0x-hex
        :returns: Description string
        :rtype: str
        """
        if len(c.w3.eth.getCode(address)) > 0:
            try:
                contract = CICRegistry.get_address(chain_spec, address)
                return 'Contract {}'.format(contract.identifier())
            except (UnknownContractError, KeyError):
                return 'Unknown contract'

        s = celery.signature(
            'cic_eth.eth.account.have',
            [
                address,
                str(chain_spec),
            ],
            queue=self.queue,
            )
        account = s.apply_async().get()
        if account == None:
            return 'Unknown account'

        s = celery.signature(
            'cic_eth.eth.account.role',
            [
                address,
                str(chain_spec),
            ],
            queue=self.queue,
            )
        role = s.apply_async().get()
        if role != None:
            return role
        return 'Custodial account'


    # TODO: Add exception upon non-existent tx aswell as invalid tx data to docstring
    def tx(self, chain_spec, tx_hash=None, tx_raw=None):
        """Output local and network details about a given transaction with local origin.

        If the transaction hash is given, the raw transaction data will be retrieved from the local transaction queue backend. Otherwise the raw transaction data must be provided directly. Only one of transaction hash and transaction data can be passed.

        :param chain_spec: Chain spec of the transaction's chain context
        :type chain_spec: cic_registry.chain.ChainSpec
        :param tx_hash: Transaction hash of transaction to parse and view
        :type tx_hash: str, 0x-hex
        :param tx_raw: Signed raw transaction data to parse and view
        :type tx_raw: str, 0x-hex
        :raises ValueError: Both tx_hash and tx_raw are passed
        :return: Transaction details
        :rtype: dict
        """
        if tx_hash != None and tx_raw != None:
            # fix: the ValueError was previously constructed but never raised
            raise ValueError('Specify only one of hash or raw tx')

        if tx_raw != None:
            tx_hash = self.w3.keccak(hexstr=tx_raw).hex()

        s = celery.signature(
            'cic_eth.queue.tx.get_tx_cache',
            [tx_hash],
            queue=self.queue,
            )
        tx = s.apply_async().get()

        source_token = None
        if tx['source_token'] != zero_address:
            try:
                source_token = CICRegistry.get_address(chain_spec, tx['source_token']).contract
            except UnknownContractError:
                source_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
                source_token = CICRegistry.add_token(chain_spec, source_token_contract)
                logg.warning('unknown source token contract {}'.format(tx['source_token']))

        destination_token = None
        # fix: this branch previously gated on and resolved tx['source_token']
        # (copy-paste from the block above); use the destination token address
        if tx['destination_token'] != zero_address:
            try:
                # .contract mirrors the source token branch — TODO confirm registry entry shape
                destination_token = CICRegistry.get_address(chain_spec, tx['destination_token']).contract
            except UnknownContractError:
                destination_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['destination_token'])
                destination_token = CICRegistry.add_token(chain_spec, destination_token_contract)
                logg.warning('unknown destination token contract {}'.format(tx['destination_token']))

        c = RpcClient(chain_spec)
        tx['sender_description'] = self._account_description(chain_spec, c, tx['sender'])
        tx['recipient_description'] = self._account_description(chain_spec, c, tx['recipient'])

        if source_token != None:
            tx['source_token_symbol'] = source_token.symbol()
            tx['sender_token_balance'] = source_token.function('balanceOf')(tx['sender']).call()

        if destination_token != None:
            tx['destination_token_symbol'] = destination_token.symbol()
            # fix: recipient balance was previously read from source_token
            tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()

        tx['network_status'] = 'Not submitted'

        try:
            c.w3.eth.getTransaction(tx_hash)
            tx['network_status'] = 'Mempool'
        except web3.exceptions.TransactionNotFound:
            pass

        try:
            r = c.w3.eth.getTransactionReceipt(tx_hash)
            if r.status == 1:
                tx['network_status'] = 'Confirmed'
                tx['block'] = r.blockNumber
                tx['tx_index'] = r.transactionIndex
            else:
                tx['network_status'] = 'Reverted'
        except web3.exceptions.TransactionNotFound:
            pass

        tx['sender_gas_balance'] = c.w3.eth.getBalance(tx['sender'])
        tx['recipient_gas_balance'] = c.w3.eth.getBalance(tx['recipient'])

        tx_unpacked = unpack_signed_raw_tx(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id())
        tx['gas_price'] = tx_unpacked['gasPrice']
        tx['gas_limit'] = tx_unpacked['gas']

        s = celery.signature(
            'cic_eth.queue.tx.get_state_log',
            [
                tx_hash,
            ],
            queue=self.queue,
            )
        tx['status_log'] = s.apply_async().get()

        return tx
|
431
apps/cic-eth/cic_eth/api/api_task.py
Normal file
431
apps/cic-eth/cic_eth/api/api_task.py
Normal file
@ -0,0 +1,431 @@
|
|||||||
|
"""API for cic-eth celery tasks
|
||||||
|
|
||||||
|
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
|
||||||
|
|
||||||
|
"""
|
||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_eth.eth.factory import TxFactory
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
|
||||||
|
app = celery.current_app
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Api:
|
||||||
|
"""Creates task chains to perform well-known CIC operations.
|
||||||
|
|
||||||
|
Each method that sends tasks returns details about the root task. The root task uuid can be provided in the callback, to enable to caller to correlate the result with individual calls. It can also be used to independently poll the completion of a task chain.
|
||||||
|
|
||||||
|
:param callback_param: Static value to pass to callback
|
||||||
|
:type callback_param: str
|
||||||
|
:param callback_task: Callback task that executes callback_param call. (Must be included by the celery worker)
|
||||||
|
:type callback_task: string
|
||||||
|
:param queue: Name of worker queue to submit tasks to
|
||||||
|
:type queue: str
|
||||||
|
"""
|
||||||
|
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop', callback_queue=None):
    """Initialize the task API, pre-building success/error callback signatures when a callback parameter is given."""
    self.chain_str = chain_str
    self.chain_spec = ChainSpec.from_chain_str(chain_str)
    self.callback_param = callback_param
    self.callback_task = callback_task
    self.queue = queue
    logg.info('api using queue {}'.format(self.queue))
    self.callback_success = None
    self.callback_error = None

    # callbacks go to the worker queue unless an explicit callback queue is given
    target_queue = self.queue if callback_queue is None else callback_queue

    if callback_param is not None:
        def _make_callback(status_code):
            # status_code 0 signals success, 1 signals error
            return celery.signature(
                callback_task,
                [callback_param, status_code],
                queue=target_queue,
                )
        self.callback_success = _make_callback(0)
        self.callback_error = _make_callback(1)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_transfer(self, from_address, to_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
    """Executes a chain of celery tasks that performs conversion between two ERC20 tokens, and transfers to a specified recipient after convert has completed.

    The chain is: lock check -> token symbol resolution -> bancor convert; the
    optional callback is linked to the final convert step.

    :param from_address: Ethereum address of sender
    :type from_address: str, 0x-hex
    :param to_address: Ethereum address of recipient
    :type to_address: str, 0x-hex
    :param target_return: Estimated return from conversion
    :type target_return: int
    :param minimum_return: The least value of destination token return to allow
    :type minimum_return: int
    :param from_token_symbol: ERC20 token symbol of token being converted
    :type from_token_symbol: str
    :param to_token_symbol: ERC20 token symbol of token to receive
    :type to_token_symbol: str
    :returns: uuid of root task
    :rtype: celery.Task
    """
    # refuse to queue the operation if the sender address is locked
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            [from_token_symbol, to_token_symbol],
            self.chain_str,
            LockEnum.QUEUE,
            from_address,
        ],
        queue=self.queue,
        )
    # resolves the symbol list forwarded by check_lock into token details
    s_tokens = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_convert = celery.signature(
        'cic_eth.eth.bancor.convert_with_default_reserve',
        [
            from_address,
            target_return,
            minimum_return,
            to_address,
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_check.link(s_tokens)
    if self.callback_param != None:
        s_convert.link(self.callback_success)
        s_tokens.link(s_convert).on_error(self.callback_error)
    else:
        s_tokens.link(s_convert)

    t = s_check.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def convert(self, from_address, target_return, minimum_return, from_token_symbol, to_token_symbol):
    """Executes a chain of celery tasks that performs conversion between two ERC20 tokens.

    Identical to convert_transfer except the converted funds stay with the
    sender (from_address is passed as the convert recipient).

    :param from_address: Ethereum address of sender
    :type from_address: str, 0x-hex
    :param target_return: Estimated return from conversion
    :type target_return: int
    :param minimum_return: The least value of destination token return to allow
    :type minimum_return: int
    :param from_token_symbol: ERC20 token symbol of token being converted
    :type from_token_symbol: str
    :param to_token_symbol: ERC20 token symbol of token to receive
    :type to_token_symbol: str
    :returns: uuid of root task
    :rtype: celery.Task
    """
    # refuse to queue the operation if the sender address is locked
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            [from_token_symbol, to_token_symbol],
            self.chain_str,
            LockEnum.QUEUE,
            from_address,
        ],
        queue=self.queue,
        )
    # resolves the symbol list forwarded by check_lock into token details
    s_tokens = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_convert = celery.signature(
        'cic_eth.eth.bancor.convert_with_default_reserve',
        [
            from_address,
            target_return,
            minimum_return,
            from_address,
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_check.link(s_tokens)
    if self.callback_param != None:
        s_convert.link(self.callback_success)
        s_tokens.link(s_convert).on_error(self.callback_error)
    else:
        s_tokens.link(s_convert)

    t = s_check.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def transfer(self, from_address, to_address, value, token_symbol):
    """Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.

    The chain is: lock check -> token symbol resolution -> token transfer; the
    optional callback is linked to the final transfer step.

    :param from_address: Ethereum address of sender
    :type from_address: str, 0x-hex
    :param to_address: Ethereum address of recipient
    :type to_address: str, 0x-hex
    :param value: Token value to transfer
    :type value: int
    :param token_symbol: ERC20 token symbol of token to send
    :type token_symbol: str
    :returns: uuid of root task
    :rtype: celery.Task
    """
    # refuse to queue the operation if the sender address is locked
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            [token_symbol],
            self.chain_str,
            LockEnum.QUEUE,
            from_address,
        ],
        queue=self.queue,
        )
    # resolves the symbol list forwarded by check_lock into token details
    s_tokens = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_transfer = celery.signature(
        'cic_eth.eth.token.transfer',
        [
            from_address,
            to_address,
            value,
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_check.link(s_tokens)
    if self.callback_param != None:
        s_transfer.link(self.callback_success)
        s_tokens.link(s_transfer).on_error(self.callback_error)
    else:
        s_tokens.link(s_transfer)

    t = s_check.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def transfer_request(self, from_address, to_address, spender_address, value, token_symbol):
    """Executes a chain of celery tasks that issues a transfer request of ERC20 tokens from one address to another.

    Two parallel branches are dispatched after the lock check: one approves the
    spender for the value, the other records the transfer approval request.

    :param from_address: Ethereum address of sender
    :type from_address: str, 0x-hex
    :param to_address: Ethereum address of recipient
    :type to_address: str, 0x-hex
    :param spender_address: Ethereum address that is executing transfer (typically an escrow contract)
    :type spender_address: str, 0x-hex
    :param value: Token value of the request
    :type value: int
    :param token_symbol: ERC20 token symbol of token to send
    :type token_symbol: str
    :returns: uuid of root task
    :rtype: celery.Task
    """
    # refuse to queue the operation if the sender address is locked
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            [token_symbol],
            self.chain_str,
            LockEnum.QUEUE,
            from_address,
        ],
        queue=self.queue,
        )
    # two separate resolver signatures: each parallel branch needs its own head
    s_tokens_transfer_approval = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_tokens_approve = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_approve = celery.signature(
        'cic_eth.eth.token.approve',
        [
            from_address,
            spender_address,
            value,
            self.chain_str,
        ],
        queue=self.queue,
        )
    s_transfer_approval = celery.signature(
        'cic_eth.eth.request.transfer_approval_request',
        [
            from_address,
            to_address,
            value,
            self.chain_str,
        ],
        queue=self.queue,
        )
    # TODO: make approve and transfer_approval chainable so callback can be part of the full chain
    if self.callback_param != None:
        # callback only covers the transfer_approval branch (see TODO above)
        s_transfer_approval.link(self.callback_success)
        s_tokens_approve.link(s_approve)
        s_tokens_transfer_approval.link(s_transfer_approval).on_error(self.callback_error)
    else:
        s_tokens_approve.link(s_approve)
        s_tokens_transfer_approval.link(s_transfer_approval)

    # both branches run in parallel once the lock check succeeds
    g = celery.group(s_tokens_approve, s_tokens_transfer_approval) #s_tokens.apply_async(queue=self.queue)
    s_check.link(g)
    t = s_check.apply_async()
    #t = s_tokens.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def balance(self, address, token_symbol):
    """Calls the provided callback with the current token balance of the given address.

    :param address: Ethereum address of holder
    :type address: str, 0x-hex
    :param token_symbol: ERC20 token symbol of token to query
    :type token_symbol: str
    :returns: uuid of root task
    :rtype: celery.Task
    """
    if self.callback_param is None:
        logg.warning('balance pointlessly called with no callback url')

    # Resolve the token symbol to token data first, then query the balance.
    s_tokens = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            [token_symbol],
            self.chain_str,
        ],
        queue=self.queue,
    )
    s_balance = celery.signature(
        'cic_eth.eth.token.balance',
        [
            address,
            self.chain_str,
        ],
        queue=self.queue,
    )

    # When a callback is configured, report the result through it and route
    # chain errors to the error callback.
    if self.callback_param is not None:
        s_balance.link(self.callback_success)
        s_tokens.link(s_balance).on_error(self.callback_error)
    else:
        s_tokens.link(s_balance)

    t = s_tokens.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def create_account(self, password='', register=True):
    """Creates a new blockchain address encrypted with the given password, and calls the provided callback with the address of the new account.

    :param password: Password to encode the password with in the backend (careful, you will have to remember it)
    :type password: str
    :param register: Register the new account in accounts index backend
    :type register: bool
    :returns: uuid of root task
    :rtype: celery.Task
    """
    # Refuse to proceed if account creation is locked (LockEnum.CREATE).
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            password,
            self.chain_str,
            LockEnum.CREATE,
        ],
        queue=self.queue,
    )
    s_account = celery.signature(
        'cic_eth.eth.account.create',
        [
            self.chain_str,
        ],
        queue=self.queue,
    )
    s_check.link(s_account)
    if self.callback_param is not None:
        s_account.link(self.callback_success)

    # Optionally append registration of the new address in the accounts index.
    if register:
        s_register = celery.signature(
            'cic_eth.eth.account.register',
            [
                self.chain_str,
            ],
            queue=self.queue,
        )
        s_account.link(s_register)

    t = s_check.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def refill_gas(self, address):
    """Creates a new gas refill transaction with the registered gas provider address.

    :param address: Ethereum address to send gas tokens to.
    :type address: str
    :returns: uuid of root task
    :rtype: celery.Task
    """
    # Refuse to queue the refill if the queue lock (LockEnum.QUEUE) is set
    # for this address.
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            address,
            self.chain_str,
            LockEnum.QUEUE,
        ],
        queue=self.queue,
    )
    s_refill = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [
            self.chain_str,
        ],
        queue=self.queue,
    )
    s_check.link(s_refill)
    if self.callback_param is not None:
        s_refill.link(self.callback_success)

    t = s_check.apply_async(queue=self.queue)
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def ping(self, r):
    """A noop callback ping for testing purposes.

    :param r: Value to pass through to the success callback
    :type r: Varies
    :returns: uuid of callback task, or None when no callback is configured
    :rtype: celery.Task
    """
    if self.callback_param is None:
        logg.warning('nothing to do')
        return None

    t = self.callback_success.apply_async([r])
    return t
|
1
apps/cic-eth/cic_eth/callbacks/__init__.py
Normal file
1
apps/cic-eth/cic_eth/callbacks/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
from .callback import Callback
|
16
apps/cic-eth/cic_eth/callbacks/callback.py
Normal file
16
apps/cic-eth/cic_eth/callbacks/callback.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
class Callback(celery.Task):
    """Provides static properties for web connection context. The properties should be set directly.
    """
    ssl = False
    """If true, a SSL client certificate with default protocol for standard library ssl will be used for the HTTP connection."""
    ssl_cert_file = None
    """Absolute path to client certificate PEM file"""
    ssl_key_file = None
    """Absolute path to client key file"""
    ssl_password = None
    """Password to unlock key file"""
    ssl_ca_file = None
    """Client certificate CA chain"""
|
64
apps/cic-eth/cic_eth/callbacks/http.py
Normal file
64
apps/cic-eth/cic_eth/callbacks/http.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
# standard imports
|
||||||
|
import json
|
||||||
|
import ssl
|
||||||
|
import os
|
||||||
|
import urllib
|
||||||
|
from urllib import request
|
||||||
|
from urllib.request import urlopen
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from . import Callback
|
||||||
|
import celery
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = celery_app.log.get_default_logger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(base=Callback, bind=True)
def http(self, result, url, status_code):
    """A generic web callback implementation for task results.

    Input parameters depend on whether the callback is used as an error callback, or as a part of a celery chain.

    The callback receives:

    {
        'root_id': the uuid of the topmost task in the chain, which is known to the API caller.
        'status': <status_code>,
        'result': <result>,
    }

    :param result: Task context object (on error) or return value of previous task (on success)
    :type result: Varies
    :param url: Url to HTTP POST results to
    :type url: str
    :param status_code: 0 on success, any other value is error
    :type status_code: int
    :raises RuntimeError: If the remote server does not respond with status 200
    """
    req = request.Request(url)
    data = {
        'root_id': self.request.root_id,
        'status': status_code,
        'result': result,
    }
    data_str = json.dumps(data)
    data_bytes = data_str.encode('utf-8')
    req.add_header('Content-Type', 'application/json')
    req.data = data_bytes

    ctx = None
    if self.ssl:
        ctx = ssl.SSLContext()
        ctx.load_cert_chain(
            self.ssl_cert_file,
            self.ssl_key_file,
            self.ssl_password,
        )
        # NOTE(review): self.ssl_ca_file is declared on Callback but never
        # loaded into the context here, so server verification configuration
        # looks incomplete — confirm intent.

    # Use a context manager so the response is closed even when the status
    # check below raises (the original leaked the connection).
    with urlopen(
        req,
        context=ctx,
    ) as response:
        if response.status != 200:
            raise RuntimeError('Expected status 200 from remote server, but got {} {}'.format(response.status, response.msg))
|
21
apps/cic-eth/cic_eth/callbacks/noop.py
Normal file
21
apps/cic-eth/cic_eth/callbacks/noop.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import celery
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = celery_app.log.get_default_logger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def noop(self, result, param, status_code):
    """A noop callback for task chains executed by external api methods. Logs the callback arguments.

    :param result: Task context object (on error) or return value of previous task (on success)
    :type result: Varies
    :param param: Static value passed from api caller
    :type param: Varies
    :param status_code: 0 on success, any other value is error
    :type status_code: int
    :returns: True
    :rtype: bool
    """
    # Log and acknowledge; no side effects beyond the log line.
    msg = 'noop callback {} {} {}'.format(result, param, status_code)
    logg.info(msg)
    return True
|
24
apps/cic-eth/cic_eth/callbacks/redis.py
Normal file
24
apps/cic-eth/cic_eth/callbacks/redis.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import redis as redis_interface
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from . import Callback
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
logg = celery_app.log.get_default_logger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(base=Callback, bind=True)
def redis(self, result, destination, status_code):
    """Publish a task result as JSON on a redis pub/sub channel.

    :param result: Task context object (on error) or return value of previous task (on success)
    :type result: Varies
    :param destination: Connection and channel spec, "host:port:db:channel"
    :type destination: str
    :param status_code: 0 on success, any other value is error
    :type status_code: int
    """
    (host, port, db, channel) = destination.split(':')
    # split() yields strings; coerce numeric fields for the client.
    r = redis_interface.Redis(host=host, port=int(port), db=int(db))
    s = json.dumps(result)
    logg.debug('redis callback on host {} port {} db {} channel {}'.format(host, port, db, channel))
    # Ensure the connection is released even if publish raises.
    try:
        r.publish(channel, s)
    finally:
        r.close()
|
24
apps/cic-eth/cic_eth/callbacks/tcp.py
Normal file
24
apps/cic-eth/cic_eth/callbacks/tcp.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# standard imports
|
||||||
|
import socket
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from . import Callback
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
logg = celery_app.log.get_default_logger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(base=Callback, bind=True)
def tcp(self, result, destination, status_code):
    """Send a task result as JSON over a raw TCP connection.

    :param result: Task context object (on error) or return value of previous task (on success)
    :type result: Varies
    :param destination: Remote endpoint spec, "host:port"
    :type destination: str
    :param status_code: 0 on success, any other value is error
    :type status_code: int
    """
    (host, port) = destination.split(':')
    logg.debug('tcp callback to {} {}'.format(host, port))
    # Context-manage the socket so it is closed even if connect/send fails;
    # sendall() retries until the whole payload is written, where send() may
    # write only part of it.
    with socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM) as s:
        s.connect((host, int(port)))
        s.sendall(json.dumps(result).encode('utf-8'))
|
61
apps/cic-eth/cic_eth/db/__init__.py
Normal file
61
apps/cic-eth/cic_eth/db/__init__.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
# an Engine, which the Session will use for connection
|
||||||
|
# resources
|
||||||
|
|
||||||
|
# TODO: Remove the package exports, all models should be imported using full path
|
||||||
|
from .models.otx import Otx
|
||||||
|
from .models.convert import TxConvertTransfer
|
||||||
|
|
||||||
|
|
||||||
|
def dsn_from_config(config):
    """Generate a dsn string from the provided config dict.

    The config dict must include all well-known database connection parameters, and must implement the method "get(key)" to retrieve them. Any missing parameters will be rendered as the literal string "None"

    :param config: Configuration object
    :type config: Varies
    :returns: dsn string
    :rtype: str
    """
    scheme = config.get('DATABASE_ENGINE')
    if config.get('DATABASE_DRIVER') is not None:
        scheme += '+{}'.format(config.get('DATABASE_DRIVER'))

    dsn = ''
    dsn_out = ''
    if config.get('DATABASE_ENGINE') == 'sqlite':
        # sqlite dsn carries no credentials, so the same string can be logged.
        dsn = '{}:///{}'.format(
            scheme,
            config.get('DATABASE_NAME'),
        )
        dsn_out = dsn

    else:
        dsn = '{}://{}:{}@{}:{}/{}'.format(
            scheme,
            config.get('DATABASE_USER'),
            config.get('DATABASE_PASSWORD'),
            config.get('DATABASE_HOST'),
            config.get('DATABASE_PORT'),
            config.get('DATABASE_NAME'),
        )
        # Masked copy for the log line; never emit the real password.
        dsn_out = '{}://{}:{}@{}:{}/{}'.format(
            scheme,
            config.get('DATABASE_USER'),
            '***',
            config.get('DATABASE_HOST'),
            config.get('DATABASE_PORT'),
            config.get('DATABASE_NAME'),
        )
    logg.debug('parsed dsn from config: {}'.format(dsn_out))
    return dsn
|
||||||
|
|
51
apps/cic-eth/cic_eth/db/enum.py
Normal file
51
apps/cic-eth/cic_eth/db/enum.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# standard imports
|
||||||
|
import enum
|
||||||
|
|
||||||
|
class StatusEnum(enum.IntEnum):
    """Lifecycle states of a queued transaction.

    - Inactive, not finalized (< 0):
      * PENDING: The initial state of a newly added transaction record. No action has been performed on this transaction yet.
      * SENDFAIL: The transaction was not received by the node.
      * RETRY: The transaction is queued for a new send attempt after previously failing.
      * READYSEND: The transaction is queued for its first send attempt.
      * OBSOLETED: A new transaction with the same nonce and higher gas has been sent to network.
      * WAITFORGAS: The transaction is on hold pending gas funding.

    - Active state (== 0):
      * SENT: The transaction has been sent to the mempool.

    - Inactive, finalized (> 0):
      * FUBAR: Unknown error occurred and transaction is abandoned. Manual intervention needed.
      * CANCELLED: The transaction was sent, but was not mined and has disappeared from the mempool. This usually follows a transaction being obsoleted.
      * OVERRIDDEN: Transaction has been manually overridden.
      * REJECTED: The transaction was rejected by the node.
      * REVERTED: The transaction was mined, but an exception occurred during EVM execution. (Block number will be set)
      * SUCCESS: The transaction was successfully mined. (Block number will be set)
    """
    PENDING = -9
    SENDFAIL = -8
    RETRY = -7
    READYSEND = -6
    OBSOLETED = -2
    WAITFORGAS = -1
    SENT = 0
    FUBAR = 1
    CANCELLED = 2
    OVERRIDDEN = 3
    REJECTED = 7
    REVERTED = 8
    SUCCESS = 9
|
||||||
|
|
||||||
|
|
||||||
|
class LockEnum(enum.IntEnum):
    """Bit flags disabling categories of operations.

    STICKY: When set, reset is not possible
    CREATE: Disable creation of accounts
    SEND: Disable sending to network
    QUEUE: Disable queueing new or modified transactions
    ALL: All bits except STICKY (0x...fe leaves bit 0 clear)
    """
    STICKY = 1
    CREATE = 2
    SEND = 4
    QUEUE = 8
    # Redundant int() around the literal removed; value unchanged.
    ALL = 0xfffffffffffffffe
|
9
apps/cic-eth/cic_eth/db/error.py
Normal file
9
apps/cic-eth/cic_eth/db/error.py
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
class TxStateChangeError(Exception):
    """Raised when an invalid state change of a queued transaction occurs."""
|
||||||
|
|
||||||
|
|
||||||
|
class UnknownConvertError(Exception):
    """Raised when a non-existent convert to transaction subtask is requested."""
|
1
apps/cic-eth/cic_eth/db/migrations/default/README
Normal file
1
apps/cic-eth/cic_eth/db/migrations/default/README
Normal file
@ -0,0 +1 @@
|
|||||||
|
Generic single-database configuration.
|
85
apps/cic-eth/cic_eth/db/migrations/default/alembic.ini
Normal file
85
apps/cic-eth/cic_eth/db/migrations/default/alembic.ini
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts
|
||||||
|
script_location = .
|
||||||
|
|
||||||
|
# template used to generate migration files
|
||||||
|
# file_template = %%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# timezone to use when rendering the date
|
||||||
|
# within the migration file as well as the filename.
|
||||||
|
# string value is passed to dateutil.tz.gettz()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the
|
||||||
|
# "slug" field
|
||||||
|
# truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; this defaults
|
||||||
|
# to migrations/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path
|
||||||
|
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
#sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||||
|
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-eth
|
||||||
|
|
||||||
|
[post_write_hooks]
|
||||||
|
# post_write_hooks defines scripts or Python functions that are run
|
||||||
|
# on newly generated revision scripts. See the documentation for further
|
||||||
|
# detail and examples
|
||||||
|
|
||||||
|
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||||
|
# hooks=black
|
||||||
|
# black.type=console_scripts
|
||||||
|
# black.entrypoint=black
|
||||||
|
# black.options=-l 79
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
77
apps/cic-eth/cic_eth/db/migrations/default/env.py
Normal file
77
apps/cic-eth/cic_eth/db/migrations/default/env.py
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
from logging.config import fileConfig
|
||||||
|
|
||||||
|
from sqlalchemy import engine_from_config
|
||||||
|
from sqlalchemy import pool
|
||||||
|
|
||||||
|
from alembic import context
|
||||||
|
|
||||||
|
# this is the Alembic Config object, which provides
|
||||||
|
# access to the values within the .ini file in use.
|
||||||
|
config = context.config
|
||||||
|
|
||||||
|
# Interpret the config file for Python logging.
|
||||||
|
# This line sets up loggers basically.
|
||||||
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
|
# add your model's MetaData object here
|
||||||
|
# for 'autogenerate' support
|
||||||
|
# from myapp import mymodel
|
||||||
|
# target_metadata = mymodel.Base.metadata
|
||||||
|
target_metadata = None
|
||||||
|
|
||||||
|
# other values from the config, defined by the needs of env.py,
|
||||||
|
# can be acquired:
|
||||||
|
# my_important_option = config.get_main_option("my_important_option")
|
||||||
|
# ... etc.
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        # literal_binds renders parameter values inline so the emitted SQL
        # script is self-contained.
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # Build the engine from the [alembic] ini section; NullPool because a
    # migration run needs only one short-lived connection.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
# Entry point: alembic invokes this module; dispatch on the CLI-selected mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
24
apps/cic-eth/cic_eth/db/migrations/default/script.py.mako
Normal file
24
apps/cic-eth/cic_eth/db/migrations/default/script.py.mako
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = ${repr(up_revision)}
|
||||||
|
down_revision = ${repr(down_revision)}
|
||||||
|
branch_labels = ${repr(branch_labels)}
|
||||||
|
depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
${downgrades if downgrades else "pass"}
|
@ -0,0 +1,35 @@
|
|||||||
|
"""Add new syncer table
|
||||||
|
|
||||||
|
Revision ID: 2a07b543335e
|
||||||
|
Revises: a2e2aab8f331
|
||||||
|
Create Date: 2020-12-27 09:35:44.017981
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '2a07b543335e'
|
||||||
|
down_revision = 'a2e2aab8f331'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # Table recording sync positions per blockchain: start and cursor
    # offsets for blocks and transactions, plus an optional target block.
    op.create_table(
        'blockchain_sync',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('blockchain', sa.String, nullable=False),
        sa.Column('block_start', sa.Integer, nullable=False, default=0),
        sa.Column('tx_start', sa.Integer, nullable=False, default=0),
        sa.Column('block_cursor', sa.Integer, nullable=False, default=0),
        sa.Column('tx_cursor', sa.Integer, nullable=False, default=0),
        sa.Column('block_target', sa.Integer, nullable=True),
        sa.Column('date_created', sa.DateTime, nullable=False),
        sa.Column('date_updated', sa.DateTime),
    )


def downgrade():
    # Reverse of upgrade: drop the syncer table.
    op.drop_table('blockchain_sync')
|
@ -0,0 +1,29 @@
|
|||||||
|
"""Add nonce index
|
||||||
|
|
||||||
|
Revision ID: 49b348246d70
|
||||||
|
Revises: 52c7c59cd0b1
|
||||||
|
Create Date: 2020-12-19 09:45:36.186446
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '49b348246d70'
|
||||||
|
down_revision = '52c7c59cd0b1'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # One row per address; address_hex is unique so each address has exactly
    # one tracked nonce value.
    op.create_table(
        'nonce',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('address_hex', sa.String(42), nullable=False, unique=True),
        sa.Column('nonce', sa.Integer, nullable=False),
    )


def downgrade():
    # Reverse of upgrade: drop the nonce table.
    op.drop_table('nonce')
|
@ -0,0 +1,31 @@
|
|||||||
|
"""Add account roles
|
||||||
|
|
||||||
|
Revision ID: 52c7c59cd0b1
|
||||||
|
Revises: 9c4bd7491015
|
||||||
|
Create Date: 2020-12-19 07:21:38.249237
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '52c7c59cd0b1'
|
||||||
|
down_revision = '9c4bd7491015'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # Maps a unique role tag to an account address.
    op.create_table(
        'account_role',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('tag', sa.Text, nullable=False, unique=True),
        sa.Column('address_hex', sa.String(42), nullable=False),
    )
    pass


def downgrade():
    # Reverse of upgrade: drop the role table.
    op.drop_table('account_role')
    pass
|
@ -0,0 +1,30 @@
|
|||||||
|
"""Add otx state log
|
||||||
|
|
||||||
|
Revision ID: 6ac7a1dadc46
|
||||||
|
Revises: 89e1e9baa53c
|
||||||
|
Create Date: 2021-01-30 13:59:49.022373
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '6ac7a1dadc46'
|
||||||
|
down_revision = '89e1e9baa53c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # Append-only status history for otx records; one row per status change,
    # keyed back to the otx table.
    op.create_table(
        'otx_state_log',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False),
        sa.Column('date', sa.DateTime, nullable=False),
        sa.Column('status', sa.Integer, nullable=False),
    )


def downgrade():
    # Reverse of upgrade: drop the state log table.
    op.drop_table('otx_state_log')
|
@ -0,0 +1,31 @@
|
|||||||
|
"""Add attempts and version log for otx
|
||||||
|
|
||||||
|
Revision ID: 71708e943dbd
|
||||||
|
Revises: 7e8d7626e38f
|
||||||
|
Create Date: 2020-09-26 14:41:19.298651
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '71708e943dbd'
|
||||||
|
down_revision = '7e8d7626e38f'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # One row per send attempt of an otx record, keyed back to the otx table.
    op.create_table(
        'otx_attempts',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False),
        sa.Column('date', sa.DateTime, nullable=False),
    )
    pass


def downgrade():
    # Reverse of upgrade: drop the attempts table.
    op.drop_table('otx_attempts')
    pass
|
@ -0,0 +1,31 @@
|
|||||||
|
"""add blocknumber pointer
|
||||||
|
|
||||||
|
Revision ID: 7cb65b893934
|
||||||
|
Revises: 8593fa1ca0f4
|
||||||
|
Create Date: 2020-09-24 19:29:13.543648
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '7cb65b893934'
|
||||||
|
down_revision = '8593fa1ca0f4'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # Single-row table holding the watcher's block-number cursor; seeded
    # with 0 so readers always find a row.
    op.create_table(
        'watcher_state',
        sa.Column('block_number', sa.Integer)
    )
    conn = op.get_bind()
    conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);')
    pass


def downgrade():
    # Reverse of upgrade: drop the watcher state table.
    op.drop_table('watcher_state')
    pass
|
@ -0,0 +1,45 @@
|
|||||||
|
"""Add block sync
|
||||||
|
|
||||||
|
Revision ID: 7e8d7626e38f
|
||||||
|
Revises: cd2052be6db2
|
||||||
|
Create Date: 2020-09-26 11:12:27.818524
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '7e8d7626e38f'
|
||||||
|
down_revision = 'cd2052be6db2'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # Replaces watcher_state with a per-chain table tracking block/tx heights
    # at three levels: backlog, current session, and chain head.
    op.create_table(
        'block_sync',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('blockchain', sa.String, nullable=False, unique=True),
        sa.Column('block_height_backlog', sa.Integer, nullable=False, default=0),
        sa.Column('tx_height_backlog', sa.Integer, nullable=False, default=0),
        sa.Column('block_height_session', sa.Integer, nullable=False, default=0),
        sa.Column('tx_height_session', sa.Integer, nullable=False, default=0),
        sa.Column('block_height_head', sa.Integer, nullable=False, default=0),
        sa.Column('tx_height_head', sa.Integer, nullable=False, default=0),
        sa.Column('date_created', sa.DateTime, nullable=False),
        sa.Column('date_updated', sa.DateTime),
    )
    op.drop_table('watcher_state')
    pass


def downgrade():
    # Restore the previous single-row watcher_state table, re-seeded with 0.
    op.drop_table('block_sync')
    op.create_table(
        'watcher_state',
        sa.Column('block_number', sa.Integer)
    )
    conn = op.get_bind()
    conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);')
    pass
|
@ -0,0 +1,35 @@
|
|||||||
|
"""Add transaction queue
|
||||||
|
|
||||||
|
Revision ID: 8593fa1ca0f4
|
||||||
|
Revises:
|
||||||
|
Create Date: 2020-09-22 21:56:42.117047
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '8593fa1ca0f4'
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    # Outgoing transaction queue: signed tx payloads with nonce, hash, and
    # status (default -9, the queue's initial/pending value).
    op.create_table(
        'otx',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('date_created', sa.DateTime, nullable=False),
        sa.Column('nonce', sa.Integer, nullable=False),
        sa.Column('tx_hash', sa.String(66), nullable=False),
        sa.Column('signed_tx', sa.Text, nullable=False),
        sa.Column('status', sa.Integer, nullable=False, default=-9),
        sa.Column('block', sa.Integer),
    )
    # Unique index enforcing one queue entry per transaction hash.
    op.create_index('idx_otx_tx', 'otx', ['tx_hash'], unique=True)


def downgrade():
    # Reverse of upgrade: drop index before table.
    op.drop_index('idx_otx_tx')
    op.drop_table('otx')
|
@ -0,0 +1,33 @@
|
|||||||
|
"""Add account lock
|
||||||
|
|
||||||
|
Revision ID: 89e1e9baa53c
|
||||||
|
Revises: 2a07b543335e
|
||||||
|
Create Date: 2021-01-27 19:57:36.793882
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '89e1e9baa53c'
|
||||||
|
down_revision = '2a07b543335e'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'lock',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column("address", sa.String(42), nullable=True),
|
||||||
|
sa.Column('blockchain', sa.String),
|
||||||
|
sa.Column("flags", sa.BIGINT(), nullable=False, default=0),
|
||||||
|
sa.Column("date_created", sa.DateTime, nullable=False),
|
||||||
|
sa.Column("otx_id", sa.Integer, nullable=True),
|
||||||
|
)
|
||||||
|
op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True)
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_index('idx_chain_address')
|
||||||
|
op.drop_table('lock')
|
@ -0,0 +1,26 @@
|
|||||||
|
"""Rename block sync table
|
||||||
|
|
||||||
|
Revision ID: 9c4bd7491015
|
||||||
|
Revises: 9daa16518a91
|
||||||
|
Create Date: 2020-10-15 23:45:56.306898
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '9c4bd7491015'
|
||||||
|
down_revision = '9daa16518a91'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.rename_table('block_sync', 'otx_sync')
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.rename_table('otx_sync', 'block_sync')
|
||||||
|
pass
|
@ -0,0 +1,30 @@
|
|||||||
|
"""add tx sync state
|
||||||
|
|
||||||
|
Revision ID: 9daa16518a91
|
||||||
|
Revises: e3b5330ee71c
|
||||||
|
Create Date: 2020-10-10 14:43:18.699276
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '9daa16518a91'
|
||||||
|
down_revision = 'e3b5330ee71c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
# op.create_table(
|
||||||
|
# 'tx_sync',
|
||||||
|
# sa.Column('tx', sa.String(66), nullable=False),
|
||||||
|
# )
|
||||||
|
# op.execute("INSERT INTO tx_sync VALUES('0x0000000000000000000000000000000000000000000000000000000000000000')")
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
# op.drop_table('tx_sync')
|
||||||
|
pass
|
@ -0,0 +1,34 @@
|
|||||||
|
"""Add date accessed to txcache
|
||||||
|
|
||||||
|
Revision ID: a2e2aab8f331
|
||||||
|
Revises: 49b348246d70
|
||||||
|
Create Date: 2020-12-24 18:58:06.137812
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'a2e2aab8f331'
|
||||||
|
down_revision = '49b348246d70'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'tx_cache',
|
||||||
|
sa.Column(
|
||||||
|
'date_checked',
|
||||||
|
sa.DateTime,
|
||||||
|
nullable=False
|
||||||
|
)
|
||||||
|
)
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
# drop does not work withs qlite
|
||||||
|
#op.drop_column('tx_cache', 'date_checked')
|
||||||
|
pass
|
@ -0,0 +1,34 @@
|
|||||||
|
"""convert tx index
|
||||||
|
|
||||||
|
Revision ID: cd2052be6db2
|
||||||
|
Revises: 7cb65b893934
|
||||||
|
Create Date: 2020-09-24 21:20:51.580500
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'cd2052be6db2'
|
||||||
|
down_revision = '7cb65b893934'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'tx_convert_transfer',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
#sa.Column('approve_tx_hash', sa.String(66), nullable=False, unique=True),
|
||||||
|
sa.Column('convert_tx_hash', sa.String(66), nullable=False, unique=True),
|
||||||
|
sa.Column('transfer_tx_hash', sa.String(66), unique=True),
|
||||||
|
# sa.Column('holder_address', sa.String(42), nullable=False),
|
||||||
|
sa.Column('recipient_address', sa.String(42), nullable=False),
|
||||||
|
)
|
||||||
|
op.create_index('idx_tx_convert_address', 'tx_convert_transfer', ['recipient_address'])
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_index('idx_tx_convert_address')
|
||||||
|
op.drop_table('tx_convert_transfer')
|
@ -0,0 +1,31 @@
|
|||||||
|
"""Add tx tracker record
|
||||||
|
|
||||||
|
Revision ID: df19f4e69676
|
||||||
|
Revises: 71708e943dbd
|
||||||
|
Create Date: 2020-10-09 23:31:44.563498
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'df19f4e69676'
|
||||||
|
down_revision = '71708e943dbd'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
# op.create_table(
|
||||||
|
# 'tx',
|
||||||
|
# sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
# sa.Column('date_added', sa.DateTime, nullable=False),
|
||||||
|
# sa.Column('tx_hash', sa.String(66), nullable=False, unique=True),
|
||||||
|
# sa.Column('success', sa.Boolean(), nullable=False),
|
||||||
|
# )
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
# op.drop_table('tx')
|
||||||
|
pass
|
@ -0,0 +1,38 @@
|
|||||||
|
"""Add cached values for tx
|
||||||
|
|
||||||
|
Revision ID: e3b5330ee71c
|
||||||
|
Revises: df19f4e69676
|
||||||
|
Create Date: 2020-10-10 00:17:07.094893
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'e3b5330ee71c'
|
||||||
|
down_revision = 'df19f4e69676'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'tx_cache',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
# sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=True),
|
||||||
|
sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
|
||||||
|
sa.Column('date_created', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('date_updated', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('source_token_address', sa.String(42), nullable=False),
|
||||||
|
sa.Column('destination_token_address', sa.String(42), nullable=False),
|
||||||
|
sa.Column('sender', sa.String(42), nullable=False),
|
||||||
|
sa.Column('recipient', sa.String(42), nullable=False),
|
||||||
|
sa.Column('from_value', sa.String(), nullable=False),
|
||||||
|
sa.Column('to_value', sa.String(), nullable=True),
|
||||||
|
sa.Column('block_number', sa.BIGINT(), nullable=True),
|
||||||
|
sa.Column('tx_index', sa.Integer, nullable=True),
|
||||||
|
)
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('tx_cache')
|
||||||
|
pass
|
85
apps/cic-eth/cic_eth/db/migrations/postgresql/alembic.ini
Normal file
85
apps/cic-eth/cic_eth/db/migrations/postgresql/alembic.ini
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts
|
||||||
|
script_location = .
|
||||||
|
|
||||||
|
# template used to generate migration files
|
||||||
|
# file_template = %%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# timezone to use when rendering the date
|
||||||
|
# within the migration file as well as the filename.
|
||||||
|
# string value is passed to dateutil.tz.gettz()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the
|
||||||
|
# "slug" field
|
||||||
|
# truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; this defaults
|
||||||
|
# to migrations/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path
|
||||||
|
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
#sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||||
|
sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-eth
|
||||||
|
|
||||||
|
[post_write_hooks]
|
||||||
|
# post_write_hooks defines scripts or Python functions that are run
|
||||||
|
# on newly generated revision scripts. See the documentation for further
|
||||||
|
# detail and examples
|
||||||
|
|
||||||
|
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||||
|
# hooks=black
|
||||||
|
# black.type=console_scripts
|
||||||
|
# black.entrypoint=black
|
||||||
|
# black.options=-l 79
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
77
apps/cic-eth/cic_eth/db/migrations/postgresql/env.py
Normal file
77
apps/cic-eth/cic_eth/db/migrations/postgresql/env.py
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
from logging.config import fileConfig
|
||||||
|
|
||||||
|
from sqlalchemy import engine_from_config
|
||||||
|
from sqlalchemy import pool
|
||||||
|
|
||||||
|
from alembic import context
|
||||||
|
|
||||||
|
# this is the Alembic Config object, which provides
|
||||||
|
# access to the values within the .ini file in use.
|
||||||
|
config = context.config
|
||||||
|
|
||||||
|
# Interpret the config file for Python logging.
|
||||||
|
# This line sets up loggers basically.
|
||||||
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
|
# add your model's MetaData object here
|
||||||
|
# for 'autogenerate' support
|
||||||
|
# from myapp import mymodel
|
||||||
|
# target_metadata = mymodel.Base.metadata
|
||||||
|
target_metadata = None
|
||||||
|
|
||||||
|
# other values from the config, defined by the needs of env.py,
|
||||||
|
# can be acquired:
|
||||||
|
# my_important_option = config.get_main_option("my_important_option")
|
||||||
|
# ... etc.
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline():
|
||||||
|
"""Run migrations in 'offline' mode.
|
||||||
|
|
||||||
|
This configures the context with just a URL
|
||||||
|
and not an Engine, though an Engine is acceptable
|
||||||
|
here as well. By skipping the Engine creation
|
||||||
|
we don't even need a DBAPI to be available.
|
||||||
|
|
||||||
|
Calls to context.execute() here emit the given string to the
|
||||||
|
script output.
|
||||||
|
|
||||||
|
"""
|
||||||
|
url = config.get_main_option("sqlalchemy.url")
|
||||||
|
context.configure(
|
||||||
|
url=url,
|
||||||
|
target_metadata=target_metadata,
|
||||||
|
literal_binds=True,
|
||||||
|
dialect_opts={"paramstyle": "named"},
|
||||||
|
)
|
||||||
|
|
||||||
|
with context.begin_transaction():
|
||||||
|
context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online():
|
||||||
|
"""Run migrations in 'online' mode.
|
||||||
|
|
||||||
|
In this scenario we need to create an Engine
|
||||||
|
and associate a connection with the context.
|
||||||
|
|
||||||
|
"""
|
||||||
|
connectable = engine_from_config(
|
||||||
|
config.get_section(config.config_ini_section),
|
||||||
|
prefix="sqlalchemy.",
|
||||||
|
poolclass=pool.NullPool,
|
||||||
|
)
|
||||||
|
|
||||||
|
with connectable.connect() as connection:
|
||||||
|
context.configure(
|
||||||
|
connection=connection, target_metadata=target_metadata
|
||||||
|
)
|
||||||
|
|
||||||
|
with context.begin_transaction():
|
||||||
|
context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
if context.is_offline_mode():
|
||||||
|
run_migrations_offline()
|
||||||
|
else:
|
||||||
|
run_migrations_online()
|
24
apps/cic-eth/cic_eth/db/migrations/postgresql/script.py.mako
Normal file
24
apps/cic-eth/cic_eth/db/migrations/postgresql/script.py.mako
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = ${repr(up_revision)}
|
||||||
|
down_revision = ${repr(down_revision)}
|
||||||
|
branch_labels = ${repr(branch_labels)}
|
||||||
|
depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
${downgrades if downgrades else "pass"}
|
@ -0,0 +1,35 @@
|
|||||||
|
"""Add new syncer table
|
||||||
|
|
||||||
|
Revision ID: 2a07b543335e
|
||||||
|
Revises: a2e2aab8f331
|
||||||
|
Create Date: 2020-12-27 09:35:44.017981
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '2a07b543335e'
|
||||||
|
down_revision = 'a2e2aab8f331'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'blockchain_sync',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('blockchain', sa.String, nullable=False),
|
||||||
|
sa.Column('block_start', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('tx_start', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('block_cursor', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('tx_cursor', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('block_target', sa.Integer, nullable=True),
|
||||||
|
sa.Column('date_created', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('date_updated', sa.DateTime),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('blockchain_sync')
|
@ -0,0 +1,29 @@
|
|||||||
|
"""Add nonce index
|
||||||
|
|
||||||
|
Revision ID: 49b348246d70
|
||||||
|
Revises: 52c7c59cd0b1
|
||||||
|
Create Date: 2020-12-19 09:45:36.186446
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '49b348246d70'
|
||||||
|
down_revision = '52c7c59cd0b1'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'nonce',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('address_hex', sa.String(42), nullable=False, unique=True),
|
||||||
|
sa.Column('nonce', sa.Integer, nullable=False),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('nonce')
|
@ -0,0 +1,31 @@
|
|||||||
|
"""Add account roles
|
||||||
|
|
||||||
|
Revision ID: 52c7c59cd0b1
|
||||||
|
Revises: 9c4bd7491015
|
||||||
|
Create Date: 2020-12-19 07:21:38.249237
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '52c7c59cd0b1'
|
||||||
|
down_revision = '9c4bd7491015'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'account_role',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('tag', sa.Text, nullable=False, unique=True),
|
||||||
|
sa.Column('address_hex', sa.String(42), nullable=False),
|
||||||
|
)
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('account_role')
|
||||||
|
pass
|
@ -0,0 +1,30 @@
|
|||||||
|
"""Add otx state log
|
||||||
|
|
||||||
|
Revision ID: 6ac7a1dadc46
|
||||||
|
Revises: 89e1e9baa53c
|
||||||
|
Create Date: 2021-01-30 13:59:49.022373
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '6ac7a1dadc46'
|
||||||
|
down_revision = '89e1e9baa53c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'otx_state_log',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False),
|
||||||
|
sa.Column('date', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('status', sa.Integer, nullable=False),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('otx_state_log')
|
@ -0,0 +1,31 @@
|
|||||||
|
"""Add attempts and version log for otx
|
||||||
|
|
||||||
|
Revision ID: 71708e943dbd
|
||||||
|
Revises: 7e8d7626e38f
|
||||||
|
Create Date: 2020-09-26 14:41:19.298651
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '71708e943dbd'
|
||||||
|
down_revision = '7e8d7626e38f'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'otx_attempts',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False),
|
||||||
|
sa.Column('date', sa.DateTime, nullable=False),
|
||||||
|
)
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('otx_attempts')
|
||||||
|
pass
|
@ -0,0 +1,31 @@
|
|||||||
|
"""add blocknumber pointer
|
||||||
|
|
||||||
|
Revision ID: 7cb65b893934
|
||||||
|
Revises: 8593fa1ca0f4
|
||||||
|
Create Date: 2020-09-24 19:29:13.543648
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '7cb65b893934'
|
||||||
|
down_revision = '8593fa1ca0f4'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'watcher_state',
|
||||||
|
sa.Column('block_number', sa.Integer)
|
||||||
|
)
|
||||||
|
conn = op.get_bind()
|
||||||
|
conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);')
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('watcher_state')
|
||||||
|
pass
|
@ -0,0 +1,42 @@
|
|||||||
|
"""Add block sync
|
||||||
|
|
||||||
|
Revision ID: 7e8d7626e38f
|
||||||
|
Revises: cd2052be6db2
|
||||||
|
Create Date: 2020-09-26 11:12:27.818524
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '7e8d7626e38f'
|
||||||
|
down_revision = 'cd2052be6db2'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'block_sync',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('blockchain', sa.String, nullable=False, unique=True),
|
||||||
|
sa.Column('height_backlog', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('height_session', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('height_head', sa.Integer, nullable=False, default=0),
|
||||||
|
sa.Column('date_created', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('date_updated', sa.DateTime),
|
||||||
|
)
|
||||||
|
op.drop_table('watcher_state')
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('block_sync')
|
||||||
|
op.create_table(
|
||||||
|
'watcher_state',
|
||||||
|
sa.Column('block_number', sa.Integer)
|
||||||
|
)
|
||||||
|
conn = op.get_bind()
|
||||||
|
conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);')
|
||||||
|
pass
|
@ -0,0 +1,35 @@
|
|||||||
|
"""Add transaction queue
|
||||||
|
|
||||||
|
Revision ID: 8593fa1ca0f4
|
||||||
|
Revises:
|
||||||
|
Create Date: 2020-09-22 21:56:42.117047
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '8593fa1ca0f4'
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'otx',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column('date_created', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('nonce', sa.Integer, nullable=False),
|
||||||
|
sa.Column('tx_hash', sa.String(66), nullable=False),
|
||||||
|
sa.Column('signed_tx', sa.Text, nullable=False),
|
||||||
|
sa.Column('status', sa.Integer, nullable=False, default=-9),
|
||||||
|
sa.Column('block', sa.Integer),
|
||||||
|
)
|
||||||
|
op.create_index('idx_otx_tx', 'otx', ['tx_hash'], unique=True)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_index('idx_otx_tx')
|
||||||
|
op.drop_table('otx')
|
@ -0,0 +1,32 @@
|
|||||||
|
"""Add account lock
|
||||||
|
|
||||||
|
Revision ID: 89e1e9baa53c
|
||||||
|
Revises: 2a07b543335e
|
||||||
|
Create Date: 2021-01-27 19:57:36.793882
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '89e1e9baa53c'
|
||||||
|
down_revision = '2a07b543335e'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'lock',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
sa.Column("address", sa.String(42), nullable=True),
|
||||||
|
sa.Column('blockchain', sa.String),
|
||||||
|
sa.Column("flags", sa.BIGINT(), nullable=False, default=0),
|
||||||
|
sa.Column("date_created", sa.DateTime, nullable=False),
|
||||||
|
)
|
||||||
|
op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True)
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_index('idx_chain_address')
|
||||||
|
op.drop_table('lock')
|
@ -0,0 +1,26 @@
|
|||||||
|
"""Rename block sync table
|
||||||
|
|
||||||
|
Revision ID: 9c4bd7491015
|
||||||
|
Revises: 9daa16518a91
|
||||||
|
Create Date: 2020-10-15 23:45:56.306898
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '9c4bd7491015'
|
||||||
|
down_revision = '9daa16518a91'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.rename_table('block_sync', 'otx_sync')
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.rename_table('otx_sync', 'block_sync')
|
||||||
|
pass
|
@ -0,0 +1,30 @@
|
|||||||
|
"""add tx sync state
|
||||||
|
|
||||||
|
Revision ID: 9daa16518a91
|
||||||
|
Revises: e3b5330ee71c
|
||||||
|
Create Date: 2020-10-10 14:43:18.699276
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '9daa16518a91'
|
||||||
|
down_revision = 'e3b5330ee71c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
# op.create_table(
|
||||||
|
# 'tx_sync',
|
||||||
|
# sa.Column('tx', sa.String(66), nullable=False),
|
||||||
|
# )
|
||||||
|
# op.execute("INSERT INTO tx_sync VALUES('0x0000000000000000000000000000000000000000000000000000000000000000')")
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
# op.drop_table('tx_sync')
|
||||||
|
pass
|
@ -0,0 +1,33 @@
|
|||||||
|
"""Add date accessed to txcache
|
||||||
|
|
||||||
|
Revision ID: a2e2aab8f331
|
||||||
|
Revises: 49b348246d70
|
||||||
|
Create Date: 2020-12-24 18:58:06.137812
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'a2e2aab8f331'
|
||||||
|
down_revision = '49b348246d70'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'tx_cache',
|
||||||
|
sa.Column(
|
||||||
|
'date_checked',
|
||||||
|
sa.DateTime,
|
||||||
|
nullable=False
|
||||||
|
)
|
||||||
|
)
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('tx_cache', 'date_checked')
|
||||||
|
pass
|
@ -0,0 +1,34 @@
|
|||||||
|
"""convert tx index
|
||||||
|
|
||||||
|
Revision ID: cd2052be6db2
|
||||||
|
Revises: 7cb65b893934
|
||||||
|
Create Date: 2020-09-24 21:20:51.580500
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'cd2052be6db2'
|
||||||
|
down_revision = '7cb65b893934'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'tx_convert_transfer',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
#sa.Column('approve_tx_hash', sa.String(66), nullable=False, unique=True),
|
||||||
|
sa.Column('convert_tx_hash', sa.String(66), nullable=False, unique=True),
|
||||||
|
sa.Column('transfer_tx_hash', sa.String(66), unique=True),
|
||||||
|
# sa.Column('holder_address', sa.String(42), nullable=False),
|
||||||
|
sa.Column('recipient_address', sa.String(42), nullable=False),
|
||||||
|
)
|
||||||
|
op.create_index('idx_tx_convert_address', 'tx_convert_transfer', ['recipient_address'])
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_index('idx_tx_convert_address')
|
||||||
|
op.drop_table('tx_convert_transfer')
|
@ -0,0 +1,31 @@
|
|||||||
|
"""Add tx tracker record
|
||||||
|
|
||||||
|
Revision ID: df19f4e69676
|
||||||
|
Revises: 71708e943dbd
|
||||||
|
Create Date: 2020-10-09 23:31:44.563498
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'df19f4e69676'
|
||||||
|
down_revision = '71708e943dbd'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
# op.create_table(
|
||||||
|
# 'tx',
|
||||||
|
# sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
# sa.Column('date_added', sa.DateTime, nullable=False),
|
||||||
|
# sa.Column('tx_hash', sa.String(66), nullable=False, unique=True),
|
||||||
|
# sa.Column('success', sa.Boolean(), nullable=False),
|
||||||
|
# )
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
# op.drop_table('tx')
|
||||||
|
pass
|
@ -0,0 +1,38 @@
|
|||||||
|
"""Add cached values for tx
|
||||||
|
|
||||||
|
Revision ID: e3b5330ee71c
|
||||||
|
Revises: df19f4e69676
|
||||||
|
Create Date: 2020-10-10 00:17:07.094893
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'e3b5330ee71c'
|
||||||
|
down_revision = 'df19f4e69676'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'tx_cache',
|
||||||
|
sa.Column('id', sa.Integer, primary_key=True),
|
||||||
|
# sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=True),
|
||||||
|
sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
|
||||||
|
sa.Column('date_created', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('date_updated', sa.DateTime, nullable=False),
|
||||||
|
sa.Column('source_token_address', sa.String(42), nullable=False),
|
||||||
|
sa.Column('destination_token_address', sa.String(42), nullable=False),
|
||||||
|
sa.Column('sender', sa.String(42), nullable=False),
|
||||||
|
sa.Column('recipient', sa.String(42), nullable=False),
|
||||||
|
sa.Column('from_value', sa.BIGINT(), nullable=False),
|
||||||
|
sa.Column('to_value', sa.BIGINT(), nullable=True),
|
||||||
|
sa.Column('block_number', sa.BIGINT(), nullable=True),
|
||||||
|
sa.Column('tx_index', sa.Integer, nullable=True),
|
||||||
|
)
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('tx_cache')
|
||||||
|
pass
|
73
apps/cic-eth/cic_eth/db/models/base.py
Normal file
73
apps/cic-eth/cic_eth/db/models/base.py
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, Integer
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
# Declarative base shared by every model in this package.
Model = declarative_base(name='Model')


class SessionBase(Model):
    """The base object for all SQLAlchemy enabled models. All other models must extend this.
    """
    __abstract__ = True

    # Surrogate primary key inherited by all concrete models.
    id = Column(Integer, primary_key=True)

    # Database connection engine of the running application.
    engine = None
    # Factory object responsible for creating sessions from the connection pool.
    sessionmaker = None
    # Whether the database backend supports query transactions. Should be
    # explicitly set by initialization code.
    transactional = True
    # Whether the database backend supports connection pooling. Should be
    # explicitly set by initialization code.
    poolable = True


    @staticmethod
    def create_session():
        """Creates a new database session.

        :returns: New session bound to the global engine
        :rtype: SQLAlchemy session
        """
        return SessionBase.sessionmaker()


    @staticmethod
    def _set_engine(engine):
        """Sets the database engine static property and rebuilds the session factory.

        :param engine: Connected SQLAlchemy engine
        """
        SessionBase.engine = engine
        SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)


    @staticmethod
    def connect(dsn, debug=False):
        """Create new database connection engine and connect to database backend.

        :param dsn: DSN string defining connection.
        :type dsn: str
        :param debug: Echo all SQL statements to the log when True
        :type debug: bool
        """
        if SessionBase.poolable:
            # NOTE(review): pool_recycle=10 recycles connections after only 10
            # seconds, which is unusually aggressive -- confirm intentional.
            e = create_engine(
                    dsn,
                    max_overflow=50,
                    pool_pre_ping=True,
                    pool_size=20,
                    pool_recycle=10,
                    echo=debug,
                )
        else:
            e = create_engine(
                    dsn,
                    echo=debug,
                )

        SessionBase._set_engine(e)


    @staticmethod
    def disconnect():
        """Disconnect from database and free resources.
        """
        SessionBase.engine.dispose()
        SessionBase.engine = None
|
55
apps/cic-eth/cic_eth/db/models/convert.py
Normal file
55
apps/cic-eth/cic_eth/db/models/convert.py
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, Enum, String, Integer
|
||||||
|
from sqlalchemy.ext.hybrid import hybrid_method
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import SessionBase
|
||||||
|
from ..error import UnknownConvertError
|
||||||
|
|
||||||
|
|
||||||
|
class TxConvertTransfer(SessionBase):
    """Table describing a transfer of funds after conversion has been successfully performed.

    :param convert_tx_hash: Transaction hash of convert transaction
    :type convert_tx_hash: str, 0x-hex
    :param recipient_address: Ethereum address of recipient of resulting token balance of conversion
    :type recipient_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    """
    __tablename__ = 'tx_convert_transfer'

    #approve_tx_hash = Column(String(66))
    convert_tx_hash = Column(String(66))
    transfer_tx_hash = Column(String(66))
    recipient_address = Column(String(42))


    @hybrid_method
    def transfer(self, transfer_tx_hash):
        """Persists transaction hash of performed transfer. Setting this ends the lifetime of this record.

        :param transfer_tx_hash: Transaction hash of the transfer transaction
        :type transfer_tx_hash: str, 0x-hex
        """
        self.transfer_tx_hash = transfer_tx_hash


    @staticmethod
    def get(convert_tx_hash):
        """Retrieves a convert transfer record by conversion transaction hash in a separate session.

        :param convert_tx_hash: Transaction hash of convert transaction
        :type convert_tx_hash: str, 0x-hex
        :raises cic_eth.db.error.UnknownConvertError: No record matches the given hash
        :returns: Matching record
        :rtype: TxConvertTransfer
        """
        session = SessionBase.create_session()
        q = session.query(TxConvertTransfer)
        q = q.filter(TxConvertTransfer.convert_tx_hash==convert_tx_hash)
        r = q.first()
        session.close()
        if r is None:
            raise UnknownConvertError(convert_tx_hash)
        return r


    def __init__(self, convert_tx_hash, recipient_address, chain_str):
        self.convert_tx_hash = convert_tx_hash
        self.recipient_address = recipient_address
        # NOTE(review): chain_str has no mapped Column on this model, so the
        # value is not persisted to the database -- confirm whether a column
        # (and migration) is missing.
        self.chain_str = chain_str
|
190
apps/cic-eth/cic_eth/db/models/lock.py
Normal file
190
apps/cic-eth/cic_eth/db/models/lock.py
Normal file
@ -0,0 +1,190 @@
|
|||||||
|
# standard imports
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, String, Integer, DateTime, ForeignKey
|
||||||
|
from cic_registry import zero_address
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.otx import Otx
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class Lock(SessionBase):
    """Deactivate functionality on a global or per-account basis

    """

    __tablename__ = "lock"

    # Chain spec string of the chain the lock applies to
    blockchain = Column(String)
    # Account the lock applies to; zero_address denotes a global lock
    address = Column(String, ForeignKey('tx_cache.sender'))
    # Bit field of active lock flags (see cic_eth.db.enum.LockEnum)
    flags = Column(Integer)
    date_created = Column(DateTime, default=datetime.datetime.utcnow)
    # Optional reference to the queued transaction that triggered the lock
    otx_id = Column(Integer, ForeignKey('otx.id'))


    def chain(self):
        """Get chain the cached instance represents.
        """
        return self.blockchain


    @staticmethod
    def set(chain_str, flags, address=zero_address, session=None, tx_hash=None):
        """Sets flags associated with the given address and chain.

        If a flags entry does not exist it is created.

        Does not validate the address against any other tables or components.

        Valid flags can be found in cic_eth.db.enum.LockEnum.

        :param chain_str: Chain spec string representation
        :type chain_str: str
        :param flags: Flags to set
        :type flags: number
        :param address: Ethereum address
        :type address: str, 0x-hex
        :param session: Database session, if None a separate session will be used.
        :type session: SQLAlchemy session
        :param tx_hash: If given and matching an existing Otx record, associate the new lock with that record
        :type tx_hash: str, 0x-hex
        :returns: New flag state of entry
        :rtype: number
        """
        localsession = session
        if localsession is None:
            localsession = SessionBase.create_session()

        q = localsession.query(Lock)
        q = q.filter(Lock.address==address)
        q = q.filter(Lock.blockchain==chain_str)
        lock = q.first()

        if lock is None:
            lock = Lock()
            lock.flags = 0
            lock.address = address
            lock.blockchain = chain_str
            if tx_hash is not None:
                q = localsession.query(Otx)
                q = q.filter(Otx.tx_hash==tx_hash)
                otx = q.first()
                if otx is not None:
                    lock.otx_id = otx.id

        lock.flags |= flags
        r = lock.flags

        localsession.add(lock)
        localsession.commit()

        if session is None:
            localsession.close()

        return r


    @staticmethod
    def reset(chain_str, flags, address=zero_address, session=None):
        """Resets flags associated with the given address and chain.

        If the resulting flags entry value is 0, the entry will be deleted.

        Does not validate the address against any other tables or components.

        Valid flags can be found in cic_eth.db.enum.LockEnum.

        :param chain_str: Chain spec string representation
        :type chain_str: str
        :param flags: Flags to reset
        :type flags: number
        :param address: Ethereum address
        :type address: str, 0x-hex
        :param session: Database session, if None a separate session will be used.
        :type session: SQLAlchemy session
        :returns: New flag state of entry
        :rtype: number
        """
        localsession = session
        if localsession is None:
            localsession = SessionBase.create_session()

        q = localsession.query(Lock)
        q = q.filter(Lock.address==address)
        q = q.filter(Lock.blockchain==chain_str)
        lock = q.first()

        r = 0
        if lock is not None:
            lock.flags &= ~flags
            if lock.flags == 0:
                # All flags cleared; remove the entry entirely.
                localsession.delete(lock)
            else:
                localsession.add(lock)
                r = lock.flags
            localsession.commit()

        if session is None:
            localsession.close()

        return r


    @staticmethod
    def check(chain_str, flags, address=zero_address, session=None):
        """Checks whether all given flags are set for given address and chain.

        Does not validate the address against any other tables or components.

        Valid flags can be found in cic_eth.db.enum.LockEnum.

        :param chain_str: Chain spec string representation
        :type chain_str: str
        :param flags: Flags to check
        :type flags: number
        :param address: Ethereum address
        :type address: str, 0x-hex
        :param session: Database session, if None a separate session will be used.
        :type session: SQLAlchemy session
        :returns: Returns the value of all flags matched
        :rtype: number
        """
        localsession = session
        if localsession is None:
            localsession = SessionBase.create_session()

        q = localsession.query(Lock)
        q = q.filter(Lock.address==address)
        q = q.filter(Lock.blockchain==chain_str)
        # Bitwise AND performed in the database; only rows matching all
        # requested flags survive.
        q = q.filter(Lock.flags.op('&')(flags)==flags)
        lock = q.first()
        if session is None:
            localsession.close()

        r = 0
        if lock is not None:
            r = lock.flags & flags
        return r


    @staticmethod
    def check_aggregate(chain_str, flags, address, session=None):
        """Checks the given flags against both the global lock and the per-address lock.

        :param chain_str: Chain spec string representation
        :type chain_str: str
        :param flags: Flags to check
        :type flags: number
        :param address: Ethereum address
        :type address: str, 0x-hex
        :param session: Database session, if None a separate session will be used.
        :type session: SQLAlchemy session
        :returns: Union of flags matched by the global and per-address locks
        :rtype: number
        """
        localsession = session
        if localsession is None:
            localsession = SessionBase.create_session()

        r = Lock.check(chain_str, flags, session=localsession)
        r |= Lock.check(chain_str, flags, address=address, session=localsession)

        if session is None:
            localsession.close()

        return r
|
86
apps/cic-eth/cic_eth/db/models/nonce.py
Normal file
86
apps/cic-eth/cic_eth/db/models/nonce.py
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
# standard imports
import logging

# third-party imports
from sqlalchemy import Column, String, Integer
from sqlalchemy import text

# local imports
from .base import SessionBase
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class Nonce(SessionBase):
    """Provides thread-safe nonce increments.
    """
    __tablename__ = 'nonce'

    # Next nonce value to hand out for the address
    nonce = Column(Integer)
    # Ethereum address the nonce counter belongs to
    address_hex = Column(String(42))


    @staticmethod
    def get(address, session=None):
        """Retrieve the currently stored nonce for an address.

        :param address: Ethereum address
        :type address: str, 0x-hex
        :param session: Database session, if None a separate session will be used.
        :type session: SQLAlchemy session
        :returns: Stored nonce, or None if no record exists
        :rtype: number
        """
        localsession = session
        if localsession is None:
            localsession = SessionBase.create_session()

        q = localsession.query(Nonce)
        q = q.filter(Nonce.address_hex==address)
        nonce = q.first()

        nonce_value = None
        if nonce is not None:
            nonce_value = nonce.nonce

        if session is None:
            localsession.close()

        return nonce_value


    @staticmethod
    def __get(conn, address):
        # Parameterized query; avoids interpolating the address into raw SQL.
        r = conn.execute(text("SELECT nonce FROM nonce WHERE address_hex = :a"), {'a': address})
        nonce = r.fetchone()
        if nonce is None:
            return None
        return nonce[0]


    @staticmethod
    def __set(conn, address, nonce):
        # Parameterized update; avoids interpolating values into raw SQL.
        conn.execute(text("UPDATE nonce SET nonce = :n WHERE address_hex = :a"), {'n': nonce, 'a': address})


    @staticmethod
    def next(address, initial_if_not_exists=0):
        """Generate next nonce for the given address.

        If there is no previous nonce record for the address, the nonce may be initialized to a specified value, or 0 if no value has been given.

        :param address: Associate Ethereum address
        :type address: str, 0x-hex
        :param initial_if_not_exists: Initial nonce value to set if no record exists
        :type initial_if_not_exists: number
        :returns: Nonce
        :rtype: number
        """
        conn = Nonce.engine.connect()
        if Nonce.transactional:
            conn.execute('BEGIN')
            # Serialize concurrent increments so two workers cannot hand out
            # the same nonce.
            conn.execute('LOCK TABLE nonce IN SHARE ROW EXCLUSIVE MODE')
        nonce = Nonce.__get(conn, address)
        logg.debug('get nonce {} for address {}'.format(nonce, address))
        if nonce is None:
            nonce = initial_if_not_exists
            conn.execute(text("INSERT INTO nonce (nonce, address_hex) VALUES (:n, :a)"), {'n': nonce, 'a': address})
            logg.debug('setting default nonce to {} for address {}'.format(nonce, address))
        Nonce.__set(conn, address, nonce+1)
        if Nonce.transactional:
            conn.execute('COMMIT')
        conn.close()
        return nonce
|
||||||
|
|
||||||
|
|
455
apps/cic-eth/cic_eth/db/models/otx.py
Normal file
455
apps/cic-eth/cic_eth/db/models/otx.py
Normal file
@ -0,0 +1,455 @@
|
|||||||
|
# standard imports
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, Enum, String, Integer, DateTime, Text, or_, ForeignKey
|
||||||
|
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import SessionBase
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.db.error import TxStateChangeError
|
||||||
|
#from cic_eth.eth.util import address_hex_from_signed_tx
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class OtxStateLog(SessionBase):
    """Audit log entry recording the status of an Otx record at a point in time.

    :param otx: Outgoing transaction record whose state is being logged
    :type otx: Otx
    """

    __tablename__ = 'otx_state_log'

    # Time the log entry was created
    date = Column(DateTime, default=datetime.datetime.utcnow)
    # Status value of the otx record at log time
    status = Column(Integer)
    # Foreign key of the otx record the entry describes
    otx_id = Column(Integer, ForeignKey('otx.id'))


    def __init__(self, otx):
        self.otx_id = otx.id
        self.status = otx.status
|
||||||
|
|
||||||
|
|
||||||
|
class Otx(SessionBase):
    """Outgoing transactions with local origin.

    :param nonce: Transaction nonce
    :type nonce: number
    :param address: Ethereum address of recipient - NOT IN USE, REMOVE
    :type address: str
    :param tx_hash: Transaction hash
    :type tx_hash: str, 0x-hex
    :param signed_tx: Signed raw transaction data
    :type signed_tx: str, 0x-hex
    """
    __tablename__ = 'otx'

    tracing = False
    """Whether to enable queue state tracing"""

    # Transaction nonce
    nonce = Column(Integer)
    date_created = Column(DateTime, default=datetime.datetime.utcnow)
    # 0x-hex transaction hash
    tx_hash = Column(String(66))
    # 0x-hex signed raw transaction data
    signed_tx = Column(Text)
    # Integer value of cic_eth.db.enum.StatusEnum
    status = Column(Integer)
    # Block number the transaction was mined in, if any
    block = Column(Integer)


    def __set_status(self, status, session=None):
        # Common state-transition helper: flushes the new status, logs the
        # transition when tracing is enabled, and commits only when it owns
        # the session.
        # NOTE(review): callers pass StatusEnum members into an Integer
        # column -- assumes StatusEnum coerces to its integer value; confirm.
        localsession = session
        if localsession == None:
            localsession = SessionBase.create_session()

        self.status = status
        localsession.add(self)
        localsession.flush()

        if self.tracing:
            self.__state_log(session=localsession)

        if session==None:
            localsession.commit()
            localsession.close()


    def set_block(self, block, session=None):
        """Set block number transaction was mined in.

        Only manipulates object, does not transaction or commit to backend.

        :param block: Block number
        :type block: number
        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        localsession = session
        if localsession == None:
            localsession = SessionBase.create_session()

        # A block may only be set once; a second attempt is a state error.
        if self.block != None:
            raise TxStateChangeError('Attempted set block {} when block was already {}'.format(block, self.block))
        self.block = block
        localsession.add(self)
        localsession.flush()

        if session==None:
            localsession.commit()
            localsession.close()


    def waitforgas(self, session=None):
        """Marks transaction as suspended pending gas funding.

        Only manipulates object, does not transaction or commit to backend.

        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if self.status >= StatusEnum.SENT.value:
            raise TxStateChangeError('WAITFORGAS cannot succeed final state, had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.WAITFORGAS, session)


    def fubar(self, session=None):
        """Marks transaction as "fubar." Any transaction marked this way is an anomaly and may be a symptom of a serious problem.

        Only manipulates object, does not transaction or commit to backend.
        """
        self.__set_status(StatusEnum.FUBAR, session)


    def reject(self, session=None):
        """Marks transaction as "rejected," which means the node rejected sending the transaction to the network. The nonce has not been spent, and the transaction should be replaced.

        Only manipulates object, does not transaction or commit to backend.
        """
        if self.status >= StatusEnum.SENT.value:
            raise TxStateChangeError('REJECTED cannot succeed SENT or final state, had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.REJECTED, session)


    def override(self, session=None):
        """Marks transaction as manually overridden.

        Only manipulates object, does not transaction or commit to backend.
        """
        if self.status >= StatusEnum.SENT.value:
            raise TxStateChangeError('OVERRIDDEN cannot succeed SENT or final state, had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.OVERRIDDEN, session)


    def retry(self, session=None):
        """Marks transaction as ready to retry after a timeout following a sendfail or a completed gas funding.

        Only manipulates object, does not transaction or commit to backend.

        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if self.status != StatusEnum.SENT.value and self.status != StatusEnum.SENDFAIL.value:
            raise TxStateChangeError('RETRY must follow SENT or SENDFAIL, but had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.RETRY, session)


    def readysend(self, session=None):
        """Marks transaction as ready for initial send attempt.

        Only manipulates object, does not transaction or commit to backend.

        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if self.status != StatusEnum.PENDING.value and self.status != StatusEnum.WAITFORGAS.value:
            raise TxStateChangeError('READYSEND must follow PENDING or WAITFORGAS, but had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.READYSEND, session)


    def sent(self, session=None):
        """Marks transaction as having been sent to network.

        Only manipulates object, does not transaction or commit to backend.

        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if self.status > StatusEnum.SENT:
            raise TxStateChangeError('SENT after {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.SENT, session)


    def sendfail(self, session=None):
        """Marks that an attempt to send the transaction to the network has failed.

        Only manipulates object, does not transaction or commit to backend.

        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if self.status not in [StatusEnum.PENDING, StatusEnum.SENT, StatusEnum.WAITFORGAS]:
            raise TxStateChangeError('SENDFAIL must follow SENT or PENDING, but had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.SENDFAIL, session)


    def minefail(self, block, session=None):
        """Marks that transaction was mined but code execution did not succeed.

        Only manipulates object, does not transaction or commit to backend.

        :param block: Block number transaction was mined in.
        :type block: number
        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if block != None:
            self.block = block
        # Unexpected predecessor states are logged, not raised; the stricter
        # checks below were deliberately disabled.
        if self.status != StatusEnum.SENT:
            logg.warning('REVERTED should follow SENT, but had {}'.format(StatusEnum(self.status).name))
        #if self.status != StatusEnum.PENDING and self.status != StatusEnum.OBSOLETED and self.status != StatusEnum.SENT:
        #if self.status > StatusEnum.SENT:
        #    raise TxStateChangeError('REVERTED must follow OBSOLETED, PENDING or SENT, but had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.REVERTED, session)


    def cancel(self, confirmed=False, session=None):
        """Marks that the transaction has been succeeded by a new transaction with same nonce.

        If set to confirmed, the previous state must be OBSOLETED, and will transition to CANCELLED - a finalized state. Otherwise, the state must follow a non-finalized state, and will be set to OBSOLETED.

        Only manipulates object, does not transaction or commit to backend.

        :param confirmed: Whether transition is to a final state.
        :type confirmed: bool
        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """
        if confirmed:
            # Unexpected predecessor states are logged, not raised; the
            # stricter checks were deliberately disabled.
            if self.status != StatusEnum.OBSOLETED:
                logg.warning('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
                #raise TxStateChangeError('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
            self.__set_status(StatusEnum.CANCELLED, session)
        elif self.status != StatusEnum.OBSOLETED:
            if self.status > StatusEnum.SENT:
                logg.warning('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
                #raise TxStateChangeError('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
            self.__set_status(StatusEnum.OBSOLETED, session)


    def success(self, block, session=None):
        """Marks that transaction was successfully mined.

        Only manipulates object, does not transaction or commit to backend.

        :param block: Block number transaction was mined in.
        :type block: number
        :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
        """

        if block != None:
            self.block = block
        # Unexpected predecessor state is logged, not raised.
        if self.status != StatusEnum.SENT:
            logg.error('SUCCESS should follow SENT, but had {}'.format(StatusEnum(self.status).name))
            #raise TxStateChangeError('SUCCESS must follow SENT, but had {}'.format(StatusEnum(self.status).name))
        self.__set_status(StatusEnum.SUCCESS, session)


    @staticmethod
    def get(status=0, limit=4096, status_exact=True):
        """Returns outgoing transaction lists by status.

        Status may either be matched exactly, or be an upper bound of the integer value of the status enum.

        :param status: Status value to use in query
        :type status: cic_eth.db.enum.StatusEnum
        :param limit: Max results to return
        :type limit: number
        :param status_exact: Whether or not to perform exact status match
        :type status_exact: bool
        :returns: List of transaction hashes
        :rtype: tuple, where first element is transaction hash
        """
        e = None
        session = Otx.create_session()
        if status_exact:
            e = session.query(Otx.tx_hash).filter(Otx.status==status).order_by(Otx.date_created.asc()).limit(limit).all()
        else:
            e = session.query(Otx.tx_hash).filter(Otx.status<=status).order_by(Otx.date_created.asc()).limit(limit).all()
        session.close()
        return e


    @staticmethod
    def load(tx_hash):
        """Retrieves the outgoing transaction record by transaction hash.

        :param tx_hash: Transaction hash
        :type tx_hash: str, 0x-hex
        """
        session = Otx.create_session()
        q = session.query(Otx)
        q = q.filter(Otx.tx_hash==tx_hash)
        # NOTE(review): the session is closed before q.first() executes the
        # query -- confirm this is intentional and works with the session
        # lifecycle in use here.
        session.close()
        return q.first()


    @staticmethod
    def account(account_address):
        """Retrieves all transaction hashes for which the given Ethereum address is sender or recipient.

        :param account_address: Ethereum address to use in query.
        :type account_address: str, 0x-hex
        :returns: Outgoing transactions
        :rtype: tuple, where first element is transaction hash
        """
        # NOTE(review): TxCache does not appear in this module's imports --
        # verify it is in scope or this method raises NameError when called.
        session = Otx.create_session()
        q = session.query(Otx.tx_hash)
        q = q.join(TxCache)
        q = q.filter(or_(TxCache.sender==account_address, TxCache.recipient==account_address))
        txs = q.all()
        session.close()
        return list(txs)


    def __state_log(self, session):
        # Append an OtxStateLog entry for the current status to the given
        # session; caller is responsible for flush/commit.
        l = OtxStateLog(self)
        session.add(l)


    @staticmethod
    def add(nonce, address, tx_hash, signed_tx, session=None):
        """Creates and persists a new outgoing transaction record.

        :param nonce: Transaction nonce
        :type nonce: number
        :param address: Ethereum address (unused by the record, see class docstring)
        :type address: str, 0x-hex
        :param tx_hash: Transaction hash
        :type tx_hash: str, 0x-hex
        :param signed_tx: Signed raw transaction data
        :type signed_tx: str, 0x-hex
        :param session: Database session, if None a separate session will be used.
        :type session: SQLAlchemy session
        :returns: The new record when an external session was given, otherwise None
        :rtype: Otx or None
        """
        localsession = session
        if localsession == None:
            localsession = SessionBase.create_session()

        otx = Otx(nonce, address, tx_hash, signed_tx)
        localsession.add(otx)
        localsession.flush()
        if otx.tracing:
            otx.__state_log(session=localsession)
            localsession.flush()

        if session==None:
            localsession.commit()
            localsession.close()
            # Record is detached after close; callers using an internal
            # session get None.
            return None

        return otx


    def __init__(self, nonce, address, tx_hash, signed_tx):
        self.nonce = nonce
        self.tx_hash = tx_hash
        self.signed_tx = signed_tx
        self.status = StatusEnum.PENDING
        signed_tx_bytes = bytes.fromhex(signed_tx[2:])

        # sender_address = address_hex_from_signed_tx(signed_tx_bytes)
        # logg.debug('decoded tx {}'.format(sender_address))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Most of the methods on this object are obsolete, but it contains a static function for retrieving "expired" outgoing transactions that should be moved to Otx instead.
|
||||||
|
class OtxSync(SessionBase):
|
||||||
|
"""Obsolete
|
||||||
|
"""
|
||||||
|
__tablename__ = 'otx_sync'
|
||||||
|
|
||||||
|
blockchain = Column(String)
|
||||||
|
block_height_backlog = Column(Integer)
|
||||||
|
tx_height_backlog = Column(Integer)
|
||||||
|
block_height_session = Column(Integer)
|
||||||
|
tx_height_session = Column(Integer)
|
||||||
|
block_height_head = Column(Integer)
|
||||||
|
tx_height_head = Column(Integer)
|
||||||
|
date_created = Column(DateTime, default=datetime.datetime.utcnow)
|
||||||
|
date_updated = Column(DateTime)
|
||||||
|
|
||||||
|
|
||||||
|
def backlog(self, block_height=None, tx_height=None):
|
||||||
|
#session = OtxSync.create_session()
|
||||||
|
if block_height != None:
|
||||||
|
if tx_height == None:
|
||||||
|
raise ValueError('tx height missing')
|
||||||
|
self.block_height_backlog = block_height
|
||||||
|
self.tx_height_backlog = tx_height
|
||||||
|
#session.add(self)
|
||||||
|
self.date_updated = datetime.datetime.utcnow()
|
||||||
|
#session.commit()
|
||||||
|
block_height = self.block_height_backlog
|
||||||
|
tx_height = self.tx_height_backlog
|
||||||
|
#session.close()
|
||||||
|
return (block_height, tx_height)
|
||||||
|
|
||||||
|
|
||||||
|
def session(self, block_height=None, tx_height=None):
|
||||||
|
#session = OtxSync.create_session()
|
||||||
|
if block_height != None:
|
||||||
|
if tx_height == None:
|
||||||
|
raise ValueError('tx height missing')
|
||||||
|
self.block_height_session = block_height
|
||||||
|
self.tx_height_session = tx_height
|
||||||
|
#session.add(self)
|
||||||
|
self.date_updated = datetime.datetime.utcnow()
|
||||||
|
#session.commit()
|
||||||
|
block_height = self.block_height_session
|
||||||
|
tx_height = self.tx_height_session
|
||||||
|
#session.close()
|
||||||
|
return (block_height, tx_height)
|
||||||
|
|
||||||
|
|
||||||
|
def head(self, block_height=None, tx_height=None):
|
||||||
|
#session = OtxSync.create_session()
|
||||||
|
if block_height != None:
|
||||||
|
if tx_height == None:
|
||||||
|
raise ValueError('tx height missing')
|
||||||
|
self.block_height_head = block_height
|
||||||
|
self.tx_height_head = tx_height
|
||||||
|
#session.add(self)
|
||||||
|
self.date_updated = datetime.datetime.utcnow()
|
||||||
|
#session.commit()
|
||||||
|
block_height = self.block_height_head
|
||||||
|
tx_height = self.tx_height_head
|
||||||
|
#session.close()
|
||||||
|
return (block_height, tx_height)
|
||||||
|
|
||||||
|
|
||||||
|
@hybrid_property
|
||||||
|
def synced(self):
|
||||||
|
#return self.block_height_session == self.block_height_backlog and self.tx_height_session == self.block_height_backlog
|
||||||
|
return self.block_height_session == self.block_height_backlog and self.tx_height_session == self.tx_height_backlog
|
||||||
|
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def load(blockchain_string, session):
|
||||||
|
q = session.query(OtxSync)
|
||||||
|
q = q.filter(OtxSync.blockchain==blockchain_string)
|
||||||
|
return q.first()
|
||||||
|
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def latest(nonce):
|
||||||
|
session = SessionBase.create_session()
|
||||||
|
otx = session.query(Otx).filter(Otx.nonce==nonce).order_by(Otx.created.desc()).first()
|
||||||
|
session.close()
|
||||||
|
return otx
|
||||||
|
|
||||||
|
|
||||||
|
@staticmethod
def get_expired(datetime_threshold):
    """Return all Otx records in SENT state created before the threshold.

    NOTE(review): grouping by both nonce and id makes every row its own
    group, which defeats any per-nonce deduplication the nonce group_by
    suggests was intended — confirm intent. Returned objects are detached
    after the session closes.

    :param datetime_threshold: Only records created before this time are returned
    :type datetime_threshold: datetime.datetime
    :returns: Matching Otx records, newest first
    :rtype: list
    """
    session = SessionBase.create_session()
    q = session.query(Otx)
    q = q.filter(Otx.date_created<datetime_threshold)
    q = q.filter(Otx.status==StatusEnum.SENT)
    q = q.order_by(Otx.date_created.desc())
    q = q.group_by(Otx.nonce)
    q = q.group_by(Otx.id)
    otxs = q.all()
    session.close()
    return otxs
|
||||||
|
|
||||||
|
|
||||||
|
def chain(self):
    """Return the chain spec string this sync record belongs to.

    :rtype: str
    """
    return self.blockchain
|
||||||
|
|
||||||
|
|
||||||
|
def __init__(self, blockchain):
    """Create a sync state record for the given chain with all cursors at zero.

    :param blockchain: Chain spec string representation
    :type blockchain: str
    """
    self.blockchain = blockchain
    # Three cursor pairs: head (live tip), session (current catch-up run)
    # and backlog (historical target), each tracked as block + tx height.
    self.block_height_head = 0
    self.tx_height_head = 0
    self.block_height_session = 0
    self.tx_height_session = 0
    self.block_height_backlog = 0
    self.tx_height_backlog = 0
|
||||||
|
|
||||||
|
|
||||||
|
|
117
apps/cic-eth/cic_eth/db/models/role.py
Normal file
117
apps/cic-eth/cic_eth/db/models/role.py
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, String, Text
|
||||||
|
from cic_registry import zero_address
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import SessionBase
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class AccountRole(SessionBase):
    """Key-value store providing plaintext tags for Ethereum addresses.

    Address is initialized to the zero-address.

    :param tag: Tag
    :type tag: str
    """
    __tablename__ = 'account_role'

    # Plaintext role tag (e.g. a well-known role name).
    tag = Column(Text)
    # 0x-hex Ethereum address associated with the tag.
    address_hex = Column(String(42))


    @staticmethod
    def get_address(tag):
        """Get Ethereum address matching the given tag

        :param tag: Tag
        :type tag: str
        :returns: Ethereum address, or zero-address if tag does not exist
        :rtype: str, 0x-hex
        """
        role = AccountRole.get_role(tag)
        if role == None:
            return zero_address
        return role.address_hex


    @staticmethod
    def get_role(tag):
        """Get AccountRole model object matching the given tag

        NOTE(review): the session is closed before the record is returned,
        so the object is detached — callers should only read loaded columns.

        :param tag: Tag
        :type tag: str
        :returns: Role object, if found
        :rtype: cic_eth.db.models.role.AccountRole
        """
        session = AccountRole.create_session()
        role = AccountRole.__get_role(session, tag)
        session.close()
        #return role.address_hex
        return role


    @staticmethod
    def __get_role(session, tag):
        # Internal lookup helper; keeps session handling in the callers.
        return session.query(AccountRole).filter(AccountRole.tag==tag).first()


    @staticmethod
    def set(tag, address_hex):
        """Associate a tag with an Ethereum address.

        This will silently overwrite the existing value.

        NOTE(review): despite the original "persist" wording, all session
        add/commit code is commented out — the returned object is NOT
        persisted here; the caller must add and commit it. Confirm this is
        the intended contract.

        :param tag: Tag
        :type tag: str
        :param address_hex: Ethereum address
        :type address_hex: str, 0x-hex
        :returns: Role object
        :rtype: cic_eth.db.models.role.AccountRole
        """
        #session = AccountRole.create_session()
        #role = AccountRole.__get(session, tag)
        role = AccountRole.get_role(tag) #session, tag)
        if role == None:
            role = AccountRole(tag)
        role.address_hex = address_hex
        #session.add(role)
        #session.commit()
        #session.close()
        return role #address_hex


    @staticmethod
    def role_for(address, session=None):
        """Retrieve role for the given address

        :param address: Ethereum address to match role for
        :type address: str, 0x-hex
        :param session: Session to use; a private one is created (and closed) if omitted
        :type session: SqlAlchemy Session
        :returns: Role tag, or None if no match
        :rtype: str or None
        """
        localsession = session
        if localsession == None:
            localsession = SessionBase.create_session()

        q = localsession.query(AccountRole)
        q = q.filter(AccountRole.address_hex==address)
        role = q.first()
        tag = None
        if role != None:
            tag = role.tag

        # Only close the session if this method created it.
        if session == None:
            localsession.close()

        return tag


    def __init__(self, tag):
        self.tag = tag
        self.address_hex = zero_address
|
168
apps/cic-eth/cic_eth/db/models/sync.py
Normal file
168
apps/cic-eth/cic_eth/db/models/sync.py
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
# standard imports
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, String, Integer, DateTime, Text, Boolean
|
||||||
|
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
|
||||||
|
class BlockchainSync(SessionBase):
    """Syncer control backend.

    :param chain: Chain spec string representation
    :type chain: str
    :param block_start: Block number to start sync from
    :type block_start: number
    :param tx_start: Block transaction number to start sync from
    :type tx_start: number
    :param block_target: Block number to sync until, inclusive; None means open-ended ("live") sync
    :type block_target: number
    """
    __tablename__ = 'blockchain_sync'

    blockchain = Column(String)
    block_start = Column(Integer)
    tx_start = Column(Integer)
    block_cursor = Column(Integer)
    tx_cursor = Column(Integer)
    block_target = Column(Integer)
    date_created = Column(DateTime, default=datetime.datetime.utcnow)
    date_updated = Column(DateTime)


    @staticmethod
    def first(chain, session=None):
        """Check if a sync session for the specified chain already exists.

        NOTE(review): returns True when NO record exists for the chain
        (i.e. this would be the first sync) — the opposite of a literal
        reading of the name. Behavior is kept as-is since callers may
        depend on it; only the documentation has been corrected.

        :param chain: Chain spec string representation
        :type chain: str
        :param session: Session to use. If not specified, a separate session will be created for this method only.
        :type session: SqlAlchemy Session
        :returns: True if no sync record exists for the chain
        :rtype: bool
        """
        local_session = False
        if session == None:
            session = SessionBase.create_session()
            local_session = True
        q = session.query(BlockchainSync.id)
        q = q.filter(BlockchainSync.blockchain==chain)
        o = q.first()
        if local_session:
            session.close()
        return o == None


    @staticmethod
    def get_last_live_height(current, session=None):
        """Get the cursor of the most recent open-ended ("live") syncer record.

        NOTE(review): the ``current`` parameter is unused, and the query
        is not filtered by chain — confirm both against callers.

        :param current: Current block number (unused)
        :type current: number
        :param session: Session to use. If not specified, a separate session will be created for this method only.
        :type session: SqlAlchemy Session
        :returns: Block and transaction number, respectively; (0, 0) when no live record exists
        :rtype: tuple
        """
        local_session = False
        if session == None:
            session = SessionBase.create_session()
            local_session = True
        q = session.query(BlockchainSync)
        q = q.filter(BlockchainSync.block_target==None)
        q = q.order_by(BlockchainSync.date_created.desc())
        o = q.first()
        if local_session:
            session.close()

        if o == None:
            return (0, 0)

        return (o.block_cursor, o.tx_cursor)


    @staticmethod
    def get_unsynced(session=None):
        """Get previous bounded sync sessions that did not complete.

        :param session: Session to use. If not specified, a separate session will be created for this method only.
        :type session: SqlAlchemy Session
        :returns: Syncer database ids, oldest first
        :rtype: list
        """
        unsynced = []
        local_session = False
        if session == None:
            session = SessionBase.create_session()
            local_session = True
        q = session.query(BlockchainSync.id)
        q = q.filter(BlockchainSync.block_target!=None)
        q = q.filter(BlockchainSync.block_cursor<BlockchainSync.block_target)
        q = q.order_by(BlockchainSync.date_created.asc())
        for u in q.all():
            unsynced.append(u[0])
        if local_session:
            session.close()

        return unsynced


    def set(self, block_height, tx_height):
        """Set the height of the syncer instance.

        Only manipulates object, does not transaction or commit to backend.

        :param block_height: Block number
        :type block_height: number
        :param tx_height: Block transaction number
        :type tx_height: number
        """
        self.block_cursor = block_height
        self.tx_cursor = tx_height


    def cursor(self):
        """Get current state of cursor from cached instance.

        :returns: Block and transaction height, respectively
        :rtype: tuple
        """
        return (self.block_cursor, self.tx_cursor)


    def start(self):
        """Get sync block start position from cached instance.

        :returns: Block and transaction height, respectively
        :rtype: tuple
        """
        return (self.block_start, self.tx_start)


    def target(self):
        """Get sync block upper bound from cached instance.

        :returns: Block number
        :rtype: number, or None if sync is open-ended
        """
        return self.block_target


    def chain(self):
        """Get chain the cached instance represents.
        """
        return self.blockchain


    def __init__(self, chain, block_start, tx_start, block_target=None):
        self.blockchain = chain
        self.block_start = block_start
        self.tx_start = tx_start
        self.block_cursor = block_start
        self.tx_cursor = tx_start
        self.block_target = block_target
        self.date_created = datetime.datetime.utcnow()
        # Fix: this previously assigned self.date_modified, which is not a
        # mapped column, so the date_updated column was never initialized.
        self.date_updated = datetime.datetime.utcnow()
|
162
apps/cic-eth/cic_eth/db/models/tx.py
Normal file
162
apps/cic-eth/cic_eth/db/models/tx.py
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
# standard imports
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from sqlalchemy import Column, String, Integer, DateTime, Enum, ForeignKey, Boolean
|
||||||
|
from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property
|
||||||
|
#from sqlalchemy.orm import relationship, backref
|
||||||
|
#from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import SessionBase
|
||||||
|
from .otx import Otx
|
||||||
|
from cic_eth.db.util import num_serialize
|
||||||
|
from cic_eth.error import NotLocalTxError
|
||||||
|
from cic_eth.db.error import TxStateChangeError
|
||||||
|
|
||||||
|
|
||||||
|
class TxCache(SessionBase):
    """Metadata expansions for outgoing transactions.

    These records are not essential for handling of outgoing transaction queues. It is implemented to reduce the amount of computation spent of parsing and analysing raw signed transaction data.

    Instantiation of the object will fail if an outgoing transaction record with the same transaction hash does not exist.

    Typically three types of transactions are recorded:

    - Token transfers; where source and destination token values and addresses are identical, sender and recipient differ.
    - Token conversions; source and destination token values and addresses differ, sender and recipient are identical.
    - Any other transaction; source and destination token addresses are zero-address.

    :param tx_hash: Transaction hash
    :type tx_hash: str, 0x-hex
    :param sender: Ethereum address of transaction sender
    :type sender: str, 0x-hex
    :param recipient: Ethereum address of transaction beneficiary (e.g. token transfer recipient)
    :type recipient: str, 0x-hex
    :param source_token_address: Contract address of token that sender spent from
    :type source_token_address: str, 0x-hex
    :param destination_token_address: Contract address of token that recipient will receive balance of
    :type destination_token_address: str, 0x-hex
    :param from_value: Amount of source tokens spent
    :type from_value: number
    :param to_value: Amount of destination tokens received
    :type to_value: number
    :param block_number: Block height the transaction was mined at, or None if not yet mined
    :type block_number: number or None
    :param tx_index: Block transaction height the transaction was mined at, or None if not yet mined
    :type tx_index: number or None
    :raises FileNotFoundError: Outgoing transaction for given transaction hash does not exist
    """
    __tablename__ = 'tx_cache'

    otx_id = Column(Integer, ForeignKey('otx.id'))
    source_token_address = Column(String(42))
    destination_token_address = Column(String(42))
    sender = Column(String(42))
    recipient = Column(String(42))
    # Token amounts are stored as big-endian hex strings (see num_serialize).
    from_value = Column(String())
    to_value = Column(String())
    block_number = Column(Integer())
    tx_index = Column(Integer())
    date_created = Column(DateTime, default=datetime.datetime.utcnow)
    date_updated = Column(DateTime, default=datetime.datetime.utcnow)
    date_checked = Column(DateTime, default=datetime.datetime.utcnow)


    def values(self):
        """Deserialize the stored hex-encoded token amounts.

        :returns: Source and destination token amounts, respectively
        :rtype: tuple of int
        """
        from_value_hex = bytes.fromhex(self.from_value)
        from_value = int.from_bytes(from_value_hex, 'big')

        to_value_hex = bytes.fromhex(self.to_value)
        to_value = int.from_bytes(to_value_hex, 'big')

        return (from_value, to_value)


    def check(self):
        """Update the "checked" timestamp to current time.

        Only manipulates object, does not transaction or commit to backend.
        """
        # Fix: use utcnow for consistency with the column defaults
        # (datetime.datetime.utcnow); mixing local time and UTC made the
        # timestamps incomparable.
        self.date_checked = datetime.datetime.utcnow()


    @staticmethod
    def clone(
            tx_hash_original,
            tx_hash_new,
            session=None,
        ):
        """Copy tx cache data and associate it with a new transaction.

        NOTE(review): on the raising paths a locally-created session is not
        closed — confirm whether callers treat these as fatal before adding
        cleanup.

        :param tx_hash_original: tx cache data to copy
        :type tx_hash_original: str, 0x-hex
        :param tx_hash_new: tx hash to associate the copied entry with
        :type tx_hash_new: str, 0x-hex
        :raises NotLocalTxError: Either transaction hash is unknown
        :raises TxStateChangeError: Original transaction already confirmed
        """
        localsession = session
        if localsession == None:
            localsession = SessionBase.create_session()

        q = localsession.query(TxCache)
        q = q.join(Otx)
        q = q.filter(Otx.tx_hash==tx_hash_original)
        txc = q.first()

        if txc == None:
            raise NotLocalTxError('original {}'.format(tx_hash_original))
        if txc.block_number != None:
            raise TxStateChangeError('cannot clone tx cache of confirmed tx {}'.format(tx_hash_original))

        q = localsession.query(Otx)
        q = q.filter(Otx.tx_hash==tx_hash_new)
        otx = q.first()

        if otx == None:
            raise NotLocalTxError('new {}'.format(tx_hash_new))

        values = txc.values()
        txc_new = TxCache(
            otx.tx_hash,
            txc.sender,
            txc.recipient,
            txc.source_token_address,
            txc.destination_token_address,
            values[0],
            values[1],
            )
        localsession.add(txc_new)
        localsession.commit()

        # Only close the session if this method created it.
        if session == None:
            localsession.close()


    def __init__(self, tx_hash, sender, recipient, source_token_address, destination_token_address, from_value, to_value, block_number=None, tx_index=None):
        session = SessionBase.create_session()
        tx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
        if tx == None:
            session.close()
            raise FileNotFoundError('outgoing transaction record unknown {} (add a Tx first)'.format(tx_hash))
        self.otx_id = tx.id
        # Fix: the session was previously only closed on the error path,
        # leaking a connection on every successful instantiation.
        session.close()

        self.sender = sender
        self.recipient = recipient
        self.source_token_address = source_token_address
        self.destination_token_address = destination_token_address
        self.from_value = num_serialize(from_value).hex()
        self.to_value = num_serialize(to_value).hex()
        self.block_number = block_number
        self.tx_index = tx_index
        # not automatically set in sqlite, it seems:
        # Fix: utcnow instead of now, consistent with the column defaults.
        self.date_created = datetime.datetime.utcnow()
        self.date_updated = self.date_created
        self.date_checked = self.date_created
|
||||||
|
|
||||||
|
|
8
apps/cic-eth/cic_eth/db/util.py
Normal file
8
apps/cic-eth/cic_eth/db/util.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
import math
|
||||||
|
|
||||||
|
def num_serialize(n):
    """Serialize a non-negative integer to its minimal big-endian byte form.

    Zero is serialized as a single zero byte.

    :param n: Value to serialize
    :type n: int
    :returns: Minimal big-endian byte representation
    :rtype: bytes
    """
    if n == 0:
        return b'\x00'
    # Fix: int.bit_length avoids the float rounding of math.log2, which for
    # large values (e.g. 2**56 - 1) rounded up and produced a spurious
    # leading zero byte in the serialization.
    bytelength = (n.bit_length() + 7) // 8
    return n.to_bytes(bytelength, 'big')
|
61
apps/cic-eth/cic_eth/error.py
Normal file
61
apps/cic-eth/cic_eth/error.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
class TokenCountError(Exception):
    """Raised when the wrong number of tokens has been given to a task.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class NotLocalTxError(Exception):
    """Raised when trying to access a tx not originated from a local task.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class PermanentTxError(Exception):
    """Raised on an unrecoverable error when sending a tx, e.g.:

    - wrong nonce
    - insufficient balance
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class TemporaryTxError(Exception):
    """Raised on a recoverable (temporary) error when sending a tx, e.g.:

    - blockchain node connection

    (Docstring fixed: previously said "permanent", copy-pasted from
    PermanentTxError.)
    """
    pass
|
||||||
|
|
||||||
|
class OutOfGasError(Exception):
    """Raised when a transaction task must yield pending gas refill for an account.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class AlreadyFillingGasError(Exception):
    """Raised when additional gas refills are issued while one is still in progress.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class InitializationError(Exception):
    """Raised when initialization state is insufficient to run a component.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class RoleMissingError(Exception):
    """Raised when a web3 action is attempted without an address with access to sign for it.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class LockedError(Exception):
    """Raised when an attempt is made to execute an action that is deactivated by a lock.
    """
    pass
|
16
apps/cic-eth/cic_eth/eth/__init__.py
Normal file
16
apps/cic-eth/cic_eth/eth/__init__.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
"""Ethereum batch functions and utilities
|
||||||
|
|
||||||
|
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
|
||||||
|
|
||||||
|
"""
|
||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .rpc import RpcClient
|
||||||
|
|
||||||
|
# Registry identifiers not provided by cic_registry itself. Values are the
# contract name hex-encoded into a 32-byte word.
# NOTE(review): '{:0<64s}' left-aligns the hex string and pads on the RIGHT
# with the character '0' — confirm this matches the on-chain identifier
# encoding (as opposed to left-padding).
registry_extra_identifiers = {
    'Faucet': '0x{:0<64s}'.format(b'Faucet'.hex()),
    'TransferApproval': '0x{:0<64s}'.format(b'TransferApproval'.hex()),
    }
|
||||||
|
|
365
apps/cic-eth/cic_eth/eth/account.py
Normal file
365
apps/cic-eth/cic_eth/eth/account.py
Normal file
@ -0,0 +1,365 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import web3
|
||||||
|
import celery
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
from erc20_single_shot_faucet import Faucet
|
||||||
|
from cic_registry import zero_address
|
||||||
|
|
||||||
|
# local import
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.eth import registry_extra_identifiers
|
||||||
|
from cic_eth.eth.task import sign_and_register_tx
|
||||||
|
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||||
|
from cic_eth.eth.factory import TxFactory
|
||||||
|
from cic_eth.db.models.nonce import Nonce
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.db.models.role import AccountRole
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
from cic_eth.error import RoleMissingError
|
||||||
|
|
||||||
|
#logg = logging.getLogger(__name__)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
|
||||||
|
class AccountTxFactory(TxFactory):
    """Factory for creating account index contract transactions
    """
    def add(
        self,
        address,
        chain_spec,
        ):
        """Register an Ethereum account address with the on-chain account registry

        :param address: Ethereum account address to add
        :type address: str, 0x-hex
        :param chain_spec: Chain to build transaction for
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Unsigned "AccountRegistry.add" transaction in standard Ethereum format
        :rtype: dict
        """

        c = CICRegistry.get_contract(chain_spec, 'AccountRegistry')
        f = c.function('add')
        tx_add_buildable = f(
            address,
            )
        gas = c.gas('add')
        tx_add = tx_add_buildable.buildTransaction({
            'from': self.address,
            'gas': gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
            'value': 0,
            })
        return tx_add


    def gift(
        self,
        address,
        chain_spec,
        ):
        """Trigger the on-chain faucet to disburse tokens to the provided Ethereum account

        :param address: Ethereum account address to gift to
        :type address: str, 0x-hex
        :param chain_spec: Chain to build transaction for
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Unsigned "Faucet.giveTo" transaction in standard Ethereum format
        :rtype: dict
        """

        c = CICRegistry.get_contract(chain_spec, 'Faucet')
        f = c.function('giveTo')
        tx_add_buildable = f(address)
        # NOTE(review): the gas budget is looked up for 'add' although the
        # invoked function is 'giveTo' — confirm whether the Faucet gas
        # table has a 'giveTo' entry and whether this is intentional.
        gas = c.gas('add')
        tx_add = tx_add_buildable.buildTransaction({
            'from': self.address,
            'gas': gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
            'value': 0,
            })
        return tx_add
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_register(data):
    """Verify that a transaction is an "AccountRegistry.add" transaction and extract its call parameters.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match AccountRegistry.add
    :returns: Parsed parameters (the registered address under 'to')
    :rtype: dict
    """
    selector = data[2:10]
    if selector != '0a3b0a4f':
        raise ValueError('Invalid account index register data ({})'.format(selector))

    # The single argument is one 32-byte word; the address occupies its
    # last 20 bytes (40 hex characters).
    word = data[10:]
    return {
        'to': web3.Web3.toChecksumAddress('0x' + word[24:64]),
        }
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_gift(data):
    """Verify that a transaction is a "Faucet.giveTo" transaction and extract its call parameters.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match Faucet.giveTo
    :returns: Parsed parameters (the beneficiary address under 'to')
    :rtype: dict
    """
    selector = data[2:10]
    if selector != '63e4bff4':
        raise ValueError('Invalid account index register data ({})'.format(selector))

    # The single argument is one 32-byte word; the address occupies its
    # last 20 bytes (40 hex characters).
    word = data[10:]
    return {
        'to': web3.Web3.toChecksumAddress('0x' + word[24:64]),
        }
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def create(password, chain_str):
    """Creates and stores a new ethereum account in the keystore.

    The password is passed on to the wallet backend, no encryption is performed in the task worker.

    :param password: Password to encrypt private key with
    :type password: str
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Ethereum address of newly created account
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    c = RpcClient(chain_spec)
    a = c.w3.eth.personal.new_account(password)
    logg.debug('created account {}'.format(a))

    # Initialize nonce provider record for account, seeded from the node's
    # pending transaction count so locally-issued nonces start in sync.
    n = c.w3.eth.getTransactionCount(a, 'pending')
    session = SessionBase.create_session()
    o = session.query(Nonce).filter(Nonce.address_hex==a).first()
    if o == None:
        o = Nonce()
        o.address_hex = a
        o.nonce = n
        session.add(o)
        session.commit()
    session.close()
    return a
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, throws=(RoleMissingError,))
def register(self, account_address, chain_str, writer_address=None):
    """Creates a transaction to add the given address to the accounts index.

    :param account_address: Ethereum address to add
    :type account_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param writer_address: Specify address in keystore to sign transaction. Overrides local accounts role setting.
    :type writer_address: str, 0x-hex
    :raises RoleMissingError: Writer address not set and writer role not found.
    :returns: The account_address input param
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)

    # Fall back to the configured index-writer role when no explicit
    # writer was supplied; zero-address means the role is unset.
    if writer_address == None:
        writer_address = AccountRole.get_address('ACCOUNTS_INDEX_WRITER')

    if writer_address == zero_address:
        raise RoleMissingError(account_address)


    logg.debug('adding account address {} to index; writer {}'.format(account_address, writer_address))
    # Follow-up tasks are routed to the same queue this task arrived on.
    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec, holder_address=writer_address)
    txf = AccountTxFactory(writer_address, c)

    tx_add = txf.add(account_address, chain_spec)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_add, chain_str, queue, 'cic_eth.eth.account.cache_account_data')

    # Worst-case wei cost of the transaction, used for the gas check.
    gas_budget = tx_add['gas'] * tx_add['gasPrice']

    logg.debug('register user tx {}'.format(tx_hash_hex))
    s = create_check_gas_and_send_task(
        [tx_signed_raw_hex],
        chain_str,
        writer_address,
        gas_budget,
        tx_hashes_hex=[tx_hash_hex],
        queue=queue,
        )
    s.apply_async()
    return account_address
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def gift(self, account_address, chain_str):
    """Creates a transaction to invoke the faucet contract for the given address.

    The transaction is signed by the gifted account itself.

    :param account_address: Ethereum address to give to
    :type account_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Raw signed transaction
    :rtype: list with transaction as only element
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)

    logg.debug('gift account address {} to index'.format(account_address))
    # Follow-up tasks are routed to the same queue this task arrived on.
    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec, holder_address=account_address)
    txf = AccountTxFactory(account_address, c)

    tx_add = txf.gift(account_address, chain_spec)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_add, chain_str, queue, 'cic_eth.eth.account.cache_gift_data')

    # Worst-case wei cost of the transaction, used for the gas check.
    gas_budget = tx_add['gas'] * tx_add['gasPrice']

    logg.debug('register user tx {}'.format(tx_hash_hex))
    s = create_check_gas_and_send_task(
        [tx_signed_raw_hex],
        chain_str,
        account_address,
        gas_budget,
        [tx_hash_hex],
        queue=queue,
        )
    s.apply_async()
    return [tx_signed_raw_hex]
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def have(self, account, chain_str):
    """Check whether the given account exists in keystore

    The check is performed by attempting to sign a throwaway message with
    the account; any failure is treated as "not present".

    NOTE(review): RpcClient is constructed with ``account`` where other
    callers pass a chain spec, and ``chain_str`` is unused — confirm the
    intended RpcClient constructor contract.

    :param account: Account to check
    :type account: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Account, or None if not exists
    :rtype: Varies
    """
    c = RpcClient(account)
    try:
        c.w3.eth.sign(account, text='2a')
        return account
    except Exception as e:
        logg.debug('cannot sign with {}: {}'.format(account, e))
        return None
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def role(self, account, chain_str):
    """Return account role for address

    NOTE(review): ``chain_str`` is unused; the role lookup is chain-agnostic.

    :param account: Account to check
    :type account: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Role tag, or None if no match
    :rtype: Varies
    """
    return AccountRole.role_for(account)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def cache_gift_data(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
    ):
    """Generates and commits transaction cache metadata for a Faucet.giveTo transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)

    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    # parse only to validate that this really is a gift transaction;
    # raises ValueError on selector mismatch. (An unused RpcClient
    # instantiation was removed here; this task needs no RPC connection.)
    unpack_gift(tx['data'])

    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['to'],
        zero_address,
        zero_address,
        0,
        0,
        )

    session = SessionBase.create_session()
    session.add(tx_cache)
    session.commit()
    # read the id before closing; the instance detaches on session close
    cache_id = tx_cache.id
    session.close()
    return (tx_hash_hex, cache_id)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def cache_account_data(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
    ):
    """Generates and commits transaction cache metadata for an AccountsIndex.add transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)

    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    # parse only to validate that this is an account registration transaction;
    # raises ValueError on selector mismatch. (An unused RpcClient
    # instantiation was removed here; this task needs no RPC connection.)
    unpack_register(tx['data'])

    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['to'],
        zero_address,
        zero_address,
        0,
        0,
        )

    session = SessionBase.create_session()
    session.add(tx_cache)
    session.commit()
    # read the id before closing; the instance detaches on session close
    cache_id = tx_cache.id
    session.close()
    return (tx_hash_hex, cache_id)
|
385
apps/cic-eth/cic_eth/eth/bancor.py
Normal file
385
apps/cic-eth/cic_eth/eth/bancor.py
Normal file
@ -0,0 +1,385 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
import web3
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db import SessionBase
|
||||||
|
from cic_eth.db.models.convert import TxConvertTransfer
|
||||||
|
from cic_eth.db.models.otx import Otx
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.eth.task import sign_and_register_tx
|
||||||
|
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||||
|
from cic_eth.eth.token import TokenTxFactory
|
||||||
|
from cic_eth.eth.factory import TxFactory
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
from cic_eth.eth.rpc import RpcClient
|
||||||
|
|
||||||
|
celery_app = celery.current_app
#logg = celery_app.log.get_default_logger()
# root logger is used here instead of the celery default (see commented line)
logg = logging.getLogger()
|
||||||
|
|
||||||
|
# 4-byte function selectors (hex, no 0x prefix) used to recognize Bancor
# contract calls when unpacking raw transaction input data
contract_function_signatures = {
        'convert': 'f3898a97',
        'convert2': '569706eb',
        }
|
||||||
|
|
||||||
|
|
||||||
|
class BancorTxFactory(TxFactory):

    """Factory for creating Bancor network transactions.
    """
    def convert(
            self,
            source_token_address,
            destination_token_address,
            reserve_address,
            source_amount,
            minimum_return,
            chain_spec,
            fee_beneficiary='0x0000000000000000000000000000000000000000',
            fee_ppm=0,
        ):
        """Create a BancorNetwork "convert" transaction.

        :param source_token_address: ERC20 contract address for token to convert from
        :type source_token_address: str, 0x-hex
        :param destination_token_address: ERC20 contract address for token to convert to
        :type destination_token_address: str, 0x-hex
        :param reserve_address: ERC20 contract address of Common reserve token
        :type reserve_address: str, 0x-hex
        :param source_amount: Amount of source tokens to convert
        :type source_amount: int
        :param minimum_return: Minimum amount of destination tokens to accept as result for conversion
        :type minimum_return: int
        :param chain_spec: Chain spec of target chain
        :type chain_spec: cic_registry.chain.ChainSpec
        :param fee_beneficiary: Address receiving the conversion fee; defaults to the zero address
        :type fee_beneficiary: str, 0x-hex
        :param fee_ppm: Conversion fee in parts per million
        :type fee_ppm: int
        :return: Unsigned "convert" transaction in standard Ethereum format
        :rtype: dict
        """
        network_contract = CICRegistry.get_contract(chain_spec, 'BancorNetwork')
        # NOTE(review): gas is looked up for 'convert' although the call below
        # is convert2 -- confirm the registry gas entries agree
        network_gas = network_contract.gas('convert')
        # conversion path passed to convert2: source token twice, then the
        # reserve, then destination token twice (pattern as originally written)
        tx_convert_buildable = network_contract.contract.functions.convert2(
            [
                source_token_address,
                source_token_address,
                reserve_address,
                destination_token_address,
                destination_token_address,
            ],
            source_amount,
            minimum_return,
            fee_beneficiary,
            fee_ppm,
        )
        tx_convert = tx_convert_buildable.buildTransaction({
            'from': self.address,
            'gas': network_gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
            })
        return tx_convert
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_convert(data):
    """Verify that raw transaction input data is a BancorNetwork convert2 call and extract its parameters.

    :param data: Raw input data from Ethereum transaction
    :type data: str, 0x-hex
    :raises ValueError: Function selector does not match convert2
    :returns: Parsed call parameters
    :rtype: dict
    """
    selector = data[2:10]
    if selector != contract_function_signatures['convert2']:
        raise ValueError('Invalid convert data ({})'.format(selector))

    payload = data[10:]
    # the conversion path array data begins at hex offset 384 of the payload
    path_words = payload[384:]
    # addresses occupy the low 20 bytes (40 hex chars) of their 32-byte words
    source_token_hex = path_words[24:64]
    destination_token_hex = path_words[-40:]

    return {
        'amount': int(payload[64:128], 16),
        'min_return': int(payload[128:192], 16),
        'source_token': web3.Web3.toChecksumAddress('0x' + source_token_hex),
        'destination_token': web3.Web3.toChecksumAddress('0x' + destination_token_hex),
        'fee_recipient': payload[192:256],
        'fee': int(payload[256:320], 16),
    }
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Kept for historical reference, it unpacks a convert call without fee parameters
|
||||||
|
#def _unpack_convert_mint(data):
|
||||||
|
# f = data[2:10]
|
||||||
|
# if f != contract_function_signatures['convert2']:
|
||||||
|
# raise ValueError('Invalid convert data ({})'.format(f))
|
||||||
|
#
|
||||||
|
# d = data[10:]
|
||||||
|
# path = d[256:]
|
||||||
|
# source = path[64-40:64]
|
||||||
|
# destination = path[-40:]
|
||||||
|
#
|
||||||
|
# amount = int(d[64:128], 16)
|
||||||
|
# min_return = int(d[128:192], 16)
|
||||||
|
# return {
|
||||||
|
# 'amount': amount,
|
||||||
|
# 'min_return': min_return,
|
||||||
|
# 'source_token': web3.Web3.toChecksumAddress('0x' + source),
|
||||||
|
# 'destination_token': web3.Web3.toChecksumAddress('0x' + destination),
|
||||||
|
# }
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def convert_with_default_reserve(self, tokens, from_address, source_amount, minimum_return, to_address, chain_str):
    """Performs a conversion between two liquid tokens using Bancor network.

    :param tokens: Token pair, source and destination respectively
    :type tokens: list of str, 0x-hex
    :param from_address: Ethereum address of sender
    :type from_address: str, 0x-hex
    :param source_amount: Amount of source tokens to convert
    :type source_amount: int
    :param minimum_return: Minimum about of destination tokens to receive
    :type minimum_return: int
    :param to_address: Recipient of a consecutive transfer, or None for convert only
    :type to_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    """

    chain_spec = ChainSpec.from_chain_str(chain_str)
    # deliver follow-up tasks on the same queue this task was received on
    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec, holder_address=from_address)

    cr = CICRegistry.get_contract(chain_spec, 'BancorNetwork')
    source_token = CICRegistry.get_address(chain_spec, tokens[0]['address'])
    reserve_address = CICRegistry.get_contract(chain_spec, 'BNTToken', 'ERC20').address()

    tx_factory = TokenTxFactory(from_address, c)

    # zero out any previous allowance before granting the new one
    tx_approve_zero = tx_factory.approve(source_token.address(), cr.address(), 0, chain_spec)
    (tx_approve_zero_hash_hex, tx_approve_zero_signed_hex) = sign_and_register_tx(tx_approve_zero, chain_str, queue, 'cic_eth.eth.token.otx_cache_approve')

    tx_approve = tx_factory.approve(source_token.address(), cr.address(), source_amount, chain_spec)
    (tx_approve_hash_hex, tx_approve_signed_hex) = sign_and_register_tx(tx_approve, chain_str, queue, 'cic_eth.eth.token.otx_cache_approve')

    tx_factory = BancorTxFactory(from_address, c)
    tx_convert = tx_factory.convert(
        tokens[0]['address'],
        tokens[1]['address'],
        reserve_address,
        source_amount,
        minimum_return,
        chain_spec,
        )
    (tx_convert_hash_hex, tx_convert_signed_hex) = sign_and_register_tx(tx_convert, chain_str, queue, 'cic_eth.eth.bancor.otx_cache_convert')

    # TODO: consider moving save recipient to async task / chain it before the tx send
    if to_address != None:
        save_convert_recipient(tx_convert_hash_hex, to_address, chain_str)

    # NOTE(review): the gas budget covers only the zero-approve tx, and only
    # the second approve's hash is forwarded, although three signed txs are
    # submitted -- confirm this is intended
    s = create_check_gas_and_send_task(
        [tx_approve_zero_signed_hex, tx_approve_signed_hex, tx_convert_signed_hex],
        chain_str,
        from_address,
        tx_approve_zero['gasPrice'] * tx_approve_zero['gas'],
        tx_hashes_hex=[tx_approve_hash_hex],
        queue=queue,
        )
    s.apply_async()
    return tx_convert_hash_hex
|
||||||
|
|
||||||
|
|
||||||
|
#@celery_app.task()
|
||||||
|
#def process_approval(tx_hash_hex):
|
||||||
|
# t = session.query(TxConvertTransfer).query(TxConvertTransfer.approve_tx_hash==tx_hash_hex).first()
|
||||||
|
# c = session.query(Otx).query(Otx.tx_hash==t.convert_tx_hash)
|
||||||
|
# gas_limit = 8000000
|
||||||
|
# gas_price = GasOracle.gas_price()
|
||||||
|
#
|
||||||
|
# # TODO: use celery group instead
|
||||||
|
# s_queue = celery.signature(
|
||||||
|
# 'cic_eth.queue.tx.create',
|
||||||
|
# [
|
||||||
|
# nonce,
|
||||||
|
# c['address'], # TODO: check that this is in fact sender address
|
||||||
|
# c['tx_hash'],
|
||||||
|
# c['signed_tx'],
|
||||||
|
# ]
|
||||||
|
# )
|
||||||
|
# s_queue.apply_async()
|
||||||
|
#
|
||||||
|
# s_check_gas = celery.signature(
|
||||||
|
# 'cic_eth.eth.tx.check_gas',
|
||||||
|
# [
|
||||||
|
# c['address'],
|
||||||
|
# [c['signed_tx']],
|
||||||
|
# gas_limit * gas_price,
|
||||||
|
# ]
|
||||||
|
# )
|
||||||
|
# s_send = celery.signature(
|
||||||
|
# 'cic_eth.eth.tx.send',
|
||||||
|
# [],
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# s_set_sent = celery.signature(
|
||||||
|
# 'cic_eth.queue.tx.set_sent_status',
|
||||||
|
# [False],
|
||||||
|
# )
|
||||||
|
# s_send.link(s_set_sent)
|
||||||
|
# s_check_gas.link(s_send)
|
||||||
|
# s_check_gas.apply_async()
|
||||||
|
# return tx_hash_hex
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def save_convert_recipient(convert_hash, recipient_address, chain_str):
    """Registers the recipient target for a convert-and-transfer operation.

    :param convert_hash: Transaction hash of convert operation
    :type convert_hash: str, 0x-hex
    :param recipient_address: Address of consequtive transfer recipient
    :type recipient_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    """
    record = TxConvertTransfer(convert_hash, recipient_address, chain_str)
    session = SessionBase.create_session()
    session.add(record)
    session.commit()
    session.close()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def save_convert_transfer(convert_hash, transfer_hash):
    """Registers that the transfer part of a convert-and-transfer operation has been executed.

    :param convert_hash: Transaction hash of convert operation
    :type convert_hash: str, 0x-hex
    :param transfer_hash: Transaction hash of transfer operation
    :type transfer_hash: str, 0x-hex
    :returns: transfer_hash as single element of a list
    :rtype: list, single str, 0x-hex
    """
    session = SessionBase.create_session()
    record = TxConvertTransfer.get(convert_hash)
    record.transfer(transfer_hash)
    session.add(record)
    session.commit()
    session.close()
    return [transfer_hash]
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: seems unused, consider removing
|
||||||
|
@celery_app.task()
def resolve_converters_by_tokens(tokens, chain_str):
    """Return converters for a list of tokens.

    :param tokens: Token specs (dicts with an 'address' entry) to look up
    :type tokens: list of dict
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: The input token specs, each annotated with a 'converters' entry
    :rtype: list of dict
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    # the registry contract is the same for every token; resolve it once
    # instead of per loop iteration
    c = CICRegistry.get_contract(chain_spec, 'ConverterRegistry')
    fn = c.function('getConvertersByAnchors')
    for t in tokens:
        # the previous try/except here merely re-raised; exceptions now
        # propagate directly
        t['converters'] = fn([t['address']]).call()

    return tokens
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def transfer_converted(self, tokens, holder_address, receiver_address, value, tx_convert_hash_hex, chain_str):
    """Execute the ERC20 transfer of a convert-and-transfer operation.

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param receiver_address: Token receiver address
    :type receiver_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param tx_convert_hash_hex: Transaction hash of the preceding convert operation
    :type tx_convert_hash_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises TokenCountError: Either none or more then one tokens have been passed as tokens argument
    :return: Transaction hash
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    if len(tokens) != 1:
        raise TokenCountError

    chain_spec = ChainSpec.from_chain_str(chain_str)

    # deliver follow-up tasks on the same queue this task arrived on
    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec, holder_address=holder_address)

    # get transaction parameters
    # NOTE(review): gas_price is assigned but never used below -- confirm
    # whether it was meant to feed the factory or can be removed
    gas_price = c.gas_price()
    tx_factory = TokenTxFactory(holder_address, c)

    token_address = tokens[0]['address']
    tx_transfer = tx_factory.transfer(
        token_address,
        receiver_address,
        value,
        chain_spec,
        )
    (tx_transfer_hash_hex, tx_transfer_signed_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, 'cic_eth.eth.token.otx_cache_transfer')

    # send transaction
    logg.info('transfer converted token {} from {} to {} value {} {}'.format(token_address, holder_address, receiver_address, value, tx_transfer_signed_hex))
    s = create_check_gas_and_send_task(
        [tx_transfer_signed_hex],
        chain_str,
        holder_address,
        tx_transfer['gasPrice'] * tx_transfer['gas'],
        None,
        queue,
        )
    # record the transfer against the convert operation first, then hand off
    # to the gas-check/send chain
    s_save = celery.signature(
        'cic_eth.eth.bancor.save_convert_transfer',
        [
            tx_convert_hash_hex,
            tx_transfer_hash_hex,
        ],
        queue=queue,
        )
    s_save.link(s)
    s_save.apply_async()
    return tx_transfer_hash_hex
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def otx_cache_convert(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
    ):
    """Generates and commits transaction cache metadata for a BancorNetwork convert transaction.

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """

    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    # raises ValueError if the data is not a convert2 call
    tx_data = unpack_convert(tx['data'])
    logg.debug('tx data {}'.format(tx_data))

    session = TxCache.create_session()
    # sender is recorded as both sender and recipient of the cached tx.
    # NOTE(review): the destination amount is recorded as the source amount;
    # the actual conversion return may differ -- confirm this is intended
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['from'],
        tx_data['source_token'],
        tx_data['destination_token'],
        tx_data['amount'],
        tx_data['amount'],
        )
    session.add(tx_cache)
    session.commit()
    session.close()
    return tx_hash_hex
|
||||||
|
|
41
apps/cic-eth/cic_eth/eth/factory.py
Normal file
41
apps/cic-eth/cic_eth/eth/factory.py
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_eth.eth.nonce import NonceOracle
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class TxFactory:
    """Base class for transaction factory classes.

    :param from_address: Signer address to create transaction on behalf of
    :type from_address: str, 0x-hex
    :param rpc_client: RPC connection object to use to acquire account nonce if no record in nonce cache
    :type rpc_client: cic_eth.eth.rpc.RpcClient
    """

    # Gas price, updated between batches; shared across all factory instances
    gas_price = 100

    def __init__(self, from_address, rpc_client):
        self.address = from_address

        # seed the nonce oracle with the pending-inclusive transaction count
        # so queued-but-unmined transactions are accounted for
        self.default_nonce = rpc_client.w3.eth.getTransactionCount(from_address, 'pending')
        self.nonce_oracle = NonceOracle(from_address, self.default_nonce)

        TxFactory.gas_price = rpc_client.gas_price()
        # bug fix: original format string had one placeholder for two arguments,
        # so the gas price was silently dropped from the log line
        logg.debug('txfactory instance address {} gas price {}'.format(self.address, self.gas_price))

    def next_nonce(self):
        """Returns the current cached nonce value, and increments it for next transaction.

        :returns: Nonce
        :rtype: number
        """
        return self.nonce_oracle.next()
|
71
apps/cic-eth/cic_eth/eth/gas.py
Normal file
71
apps/cic-eth/cic_eth/eth/gas.py
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.role import AccountRole
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class GasOracle():
    """Provides gas pricing for transactions.

    :param w3: Web3 object
    :type w3: web3.Web3
    """

    # hardcoded: 2000000000 (2 gwei) * 60000 * 3 -- presumably
    # price * gas limit * tx count; unverified
    __safe_threshold_amount_value = 2000000000 * 60000 * 3
    # refill to five times the safe threshold
    __refill_amount_value = __safe_threshold_amount_value * 5
    # gas limit of a plain value transfer
    default_gas_limit = 21000

    def __init__(self, w3):
        self.w3 = w3
        # NOTE(review): on current web3.py versions eth.gas_price is a
        # property, not a callable -- confirm the pinned web3 version
        # supports this call
        self.gas_price_current = w3.eth.gas_price()


    def safe_threshold_amount(self):
        """The gas balance threshold under which a new gas refill transaction should be initiated.

        :returns: Gas token amount
        :rtype: number
        """
        g = GasOracle.__safe_threshold_amount_value
        logg.warning('gas safe threshold is currently hardcoded to {}'.format(g))
        return g


    def refill_amount(self):
        """The amount of gas tokens to send in a gas refill transaction.

        :returns: Gas token amount
        :rtype: number
        """
        g = GasOracle.__refill_amount_value
        logg.warning('gas refill amount is currently hardcoded to {}'.format(g))
        return g


    def gas_provider(self):
        """Gas provider address.

        :returns: Ethereum account address
        :rtype: str, 0x-hex
        """
        return AccountRole.get_address('GAS_GIFTER')


    def gas_price(self, category='safe'):
        """Get projected gas price to use for a transaction at the current moment.

        When the category parameter is implemented, it can be used to control the priority of a transaction in the network.

        :param category: Bid level category to return price for. Currently has no effect.
        :type category: str
        :returns: Gas price
        :rtype: number
        """
        #logg.warning('gas price hardcoded to category "safe"')
        #g = 100
        #return g
        return self.gas_price_current
|
23
apps/cic-eth/cic_eth/eth/nonce.py
Normal file
23
apps/cic-eth/cic_eth/eth/nonce.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.nonce import Nonce
|
||||||
|
|
||||||
|
class NonceOracle():
    """Ensures atomic nonce increments for all transactions across all tasks and threads.

    :param address: Address to generate nonces for
    :type address: str, 0x-hex
    :param default_nonce: Initial nonce value to use if no nonce cache entry already exists
    :type default_nonce: number
    """
    def __init__(self, address, default_nonce):
        self.default_nonce = default_nonce
        self.address = address

    def next(self):
        """Get next unique nonce.

        :returns: Nonce
        :rtype: number
        """
        # delegate to the persistent nonce record, falling back to the seed
        # value when no record exists yet
        return Nonce.next(self.address, self.default_nonce)
|
194
apps/cic-eth/cic_eth/eth/request.py
Normal file
194
apps/cic-eth/cic_eth/eth/request.py
Normal file
@ -0,0 +1,194 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import web3
|
||||||
|
import celery
|
||||||
|
from erc20_approval_escrow import TransferApproval
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.eth.factory import TxFactory
|
||||||
|
from cic_eth.eth.task import sign_and_register_tx
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||||
|
from cic_eth.error import TokenCountError
|
||||||
|
|
||||||
|
celery_app = celery.current_app
# root logger is used for this module
logg = logging.getLogger()
|
||||||
|
|
||||||
|
# 4-byte function selectors (hex, no 0x prefix) used to recognize
# TransferApproval contract calls when unpacking raw transaction input data
contract_function_signatures = {
        'request': 'b0addede',
        }
|
||||||
|
|
||||||
|
|
||||||
|
class TransferRequestTxFactory(TxFactory):
    """Factory for creating Transfer request transactions using the TransferApproval contract backend
    """
    def request(
            self,
            token_address,
            beneficiary_address,
            amount,
            chain_spec,
        ):
        """Create a new TransferApproval.request transaction

        :param token_address: Token to create transfer request for
        :type token_address: str, 0x-hex
        :param beneficiary_address: Beneficiary of token transfer
        :type beneficiary_address: str, 0x-hex
        :param amount: Amount of tokens to transfer
        :type amount: number
        :param chain_spec: Chain spec
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Transaction in standard Ethereum format
        :rtype: dict
        """
        transfer_approval = CICRegistry.get_contract(chain_spec, 'TransferApproval', 'TransferAuthorization')
        fn = transfer_approval.function('createRequest')
        # contract argument order is (beneficiary, token, amount)
        tx_approval_buildable = fn(beneficiary_address, token_address, amount)
        transfer_approval_gas = transfer_approval.gas('createRequest')

        tx_approval = tx_approval_buildable.buildTransaction({
            'from': self.address,
            'gas': transfer_approval_gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
            })
        return tx_approval
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_transfer_approval_request(data):
    """Verifies that a transaction is an "TransferApproval.request" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match TransferApproval.request
    :returns: Parsed parameters
    :rtype: dict
    """
    selector = data[2:10]
    if selector != contract_function_signatures['request']:
        raise ValueError('Invalid transfer request data ({})'.format(selector))

    payload = data[10:]
    # addresses occupy the low 20 bytes (40 hex chars) of their 32-byte words
    beneficiary = web3.Web3.toChecksumAddress('0x' + payload[24:64])
    token = web3.Web3.toChecksumAddress('0x' + payload[88:128])
    amount = int(payload[128:], 16)
    return {
        'to': beneficiary,
        'token': token,
        'amount': amount,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def transfer_approval_request(self, tokens, holder_address, receiver_address, value, chain_str):
    """Creates a new transfer approval

    :param tokens: Token to generate transfer request for
    :type tokens: list with single token spec as dict
    :param holder_address: Address to generate transfer on behalf of
    :type holder_address: str, 0x-hex
    :param receiver_address: Address to transfer tokens to
    :type receiver_address: str, 0x-hex
    :param value: Amount of tokens to transfer
    :type value: number
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises cic_eth.error.TokenCountError: More than one token in tokens argument
    :returns: Raw signed transaction
    :rtype: list with transaction as only element
    """
    # this task handles exactly one token per invocation
    if len(tokens) != 1:
        raise TokenCountError

    chain_spec = ChainSpec.from_chain_str(chain_str)

    # deliver follow-up tasks on the same queue this task arrived on
    queue = self.request.delivery_info['routing_key']

    t = tokens[0]

    # bug fix: RpcClient takes a chain spec as its first argument; the holder
    # address was previously passed in its place
    c = RpcClient(chain_spec, holder_address=holder_address)

    txf = TransferRequestTxFactory(holder_address, c)

    tx_transfer = txf.request(t['address'], receiver_address, value, chain_spec)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, 'cic_eth.eth.request.otx_cache_transfer_approval_request')

    gas_budget = tx_transfer['gas'] * tx_transfer['gasPrice']

    s = create_check_gas_and_send_task(
        [tx_signed_raw_hex],
        chain_str,
        holder_address,
        gas_budget,
        [tx_hash_hex],
        queue,
        )
    s.apply_async()
    return [tx_signed_raw_hex]
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def otx_cache_transfer_approval_request(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
    ):
    """Generates and commits transaction cache metadata for an TransferApproval.request transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    # typo fix in log message ('acche' -> 'cache')
    logg.debug('in otx cache transfer approval request')
    # cache helper returns (tx hash, cache id); only the hash is forwarded
    (txc, cache_id) = cache_transfer_approval_request_data(tx_hash_hex, tx)
    return txc
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def cache_transfer_approval_request_data(
        tx_hash_hex,
        tx,
    ):
    """Helper function for otx_cache_transfer_approval_request

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx: Unpacked transaction object
    :type tx: dict
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    tx_data = unpack_transfer_approval_request(tx['data'])
    logg.debug('tx approval request data {}'.format(tx_data))
    logg.debug('tx approval request {}'.format(tx))

    # token address and amount are recorded identically on both sides of
    # the cache entry
    entry = TxCache(
        tx_hash_hex,
        tx['from'],
        tx_data['to'],
        tx_data['token'],
        tx_data['token'],
        tx_data['amount'],
        tx_data['amount'],
        )
    session = SessionBase.create_session()
    session.add(entry)
    session.commit()
    cache_id = entry.id
    session.close()
    return (tx_hash_hex, cache_id)
|
39
apps/cic-eth/cic_eth/eth/rpc.py
Normal file
39
apps/cic-eth/cic_eth/eth/rpc.py
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.eth.gas import GasOracle
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class RpcClient(GasOracle):
    """RPC wrapper for web3 enabling gas calculation helpers and signer middleware.

    :param chain_spec: Chain spec
    :type chain_spec: cic_registry.chain.ChainSpec
    :param holder_address: DEPRECATED Address of subject of the session.
    :type holder_address: str, 0x-hex
    """

    signer_ipc_path = None
    """Unix socket path to JSONRPC signer and keystore"""

    web3_constructor = None
    """Custom function to build a web3 object with middleware plugins"""

    def __init__(self, chain_spec, holder_address=None):
        # web3_constructor must have been installed with set_constructor()
        # before the first instantiation; it returns (provider, web3) pair
        (self.provider, w3) = RpcClient.web3_constructor()
        super(RpcClient, self).__init__(w3)
        self.chain_spec = chain_spec
        # use identity comparison for None (PEP 8), not equality
        if holder_address is not None:
            self.holder_address = holder_address
        logg.info('gasprice {}'.format(self.gas_price()))


    @staticmethod
    def set_constructor(web3_constructor):
        """Sets the constructor to use for building the web3 object.

        :param web3_constructor: Callable returning a (provider, web3) tuple
        :type web3_constructor: function
        """
        RpcClient.web3_constructor = web3_constructor
|
136
apps/cic-eth/cic_eth/eth/task.py
Normal file
136
apps/cic-eth/cic_eth/eth/task.py
Normal file
@ -0,0 +1,136 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.queue.tx import create as queue_create
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = celery_app.log.get_default_logger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def sign_tx(tx, chain_str):
    """Sign a single transaction against the given chain specification.

    :param tx: Transaction in standard Ethereum format
    :type tx: dict
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and raw signed transaction, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    client = RpcClient(chain_spec)
    signed = client.w3.eth.sign_transaction(tx)
    logg.debug('tx_transfer_signed {}'.format(signed))
    # hash is computed locally over the raw signed payload
    tx_hash_hex = client.w3.keccak(hexstr=signed['raw']).hex()
    return (tx_hash_hex, signed['raw'],)
|
||||||
|
|
||||||
|
|
||||||
|
def sign_and_register_tx(tx, chain_str, queue, cache_task=None):
    """Signs the provided transaction, and adds it to the transaction queue cache (with status PENDING).

    :param tx: Standard ethereum transaction data
    :type tx: dict
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :param queue: Task queue
    :type queue: str
    :param cache_task: Cache task to call with signed transaction. If None, no task will be called.
    :type cache_task: str
    :returns: Tuple; Transaction hash, signed raw transaction data
    :rtype: tuple
    """
    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str)

    logg.debug('adding queue tx {}'.format(tx_hash_hex))

    # TODO: consider returning this as a signature that consequtive tasks can be linked to
    queue_create(
        tx['nonce'],
        tx['from'],
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        )

    # identity comparison for None (PEP 8); cache task is fire-and-forget
    if cache_task is not None:
        logg.debug('adding cache task {} tx {}'.format(cache_task, tx_hash_hex))
        s_cache = celery.signature(
                cache_task,
                [
                    tx_hash_hex,
                    tx_signed_raw_hex,
                    chain_str,
                ],
                queue=queue,
                )
        s_cache.apply_async()

    return (tx_hash_hex, tx_signed_raw_hex,)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: rename as we will not be sending task in the chain, this is the responsibility of the dispatcher
def create_check_gas_and_send_task(tx_signed_raws_hex, chain_str, holder_address, gas, tx_hashes_hex=None, queue=None):
    """Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher.

    If tx_hashes_hex is not spefified, a preceding task chained to check_gas must supply the transaction hashes as its return value.

    :param tx_signed_raws_hex: Raw signed transaction data
    :type tx_signed_raws_hex: list of str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :param holder_address: Address sending the transactions
    :type holder_address: str, 0x-hex
    :param gas: Gas budget hint for transactions
    :type gas: int
    :param tx_hashes_hex: Transaction hashes
    :type tx_hashes_hex: list of str, 0x-hex
    :param queue: Task queue
    :type queue: str
    :returns: Signature of task chain
    :rtype: celery.Signature
    """
    # the two variants differ only in whether tx_hashes_hex is passed as the
    # first positional argument; when omitted, celery chaining supplies it
    # from the preceding task's return value
    args = [
        chain_str,
        tx_signed_raws_hex,
        holder_address,
        gas,
        ]
    if tx_hashes_hex is not None:
        args.insert(0, tx_hashes_hex)
    s_check_gas = celery.signature(
            'cic_eth.eth.tx.check_gas',
            args,
            queue=queue,
            )
    return s_check_gas
|
447
apps/cic-eth/cic_eth/eth/token.py
Normal file
447
apps/cic-eth/cic_eth/eth/token.py
Normal file
@ -0,0 +1,447 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
import requests
|
||||||
|
import web3
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# platform imports
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.error import TokenCountError, PermanentTxError, OutOfGasError, NotLocalTxError
|
||||||
|
from cic_eth.eth.task import sign_and_register_tx
|
||||||
|
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||||
|
from cic_eth.eth.factory import TxFactory
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
# TODO: fetch from cic-contracts instead when implemented
# 4-byte function selectors (hex, without 0x prefix) for the ERC20 calls
# parsed by the unpack_* helpers below.
contract_function_signatures = {
    'transfer': 'a9059cbb',
    'approve': '095ea7b3',
    'transferfrom': '23b872dd',
    }
|
||||||
|
|
||||||
|
|
||||||
|
class TokenTxFactory(TxFactory):
    """Factory for creating ERC20 token transactions.

    Token contract interfaces are resolved through CICRegistry; sender
    address, gas price and nonce handling come from the TxFactory base.
    """
    def approve(
            self,
            token_address,
            spender_address,
            amount,
            chain_spec,
            ):
        """Create an ERC20 "approve" transaction

        :param token_address: ERC20 contract address
        :type token_address: str, 0x-hex
        :param spender_address: Address to approve spending for
        :type spender_address: str, 0x-hex
        :param amount: Amount of tokens to approve
        :type amount: int
        :param chain_spec: Chain spec
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Unsigned "approve" transaction in standard Ethereum format
        :rtype: dict
        """
        source_token = CICRegistry.get_address(chain_spec, token_address)
        source_token_contract = source_token.contract
        tx_approve_buildable = source_token_contract.functions.approve(
            spender_address,
            amount,
        )
        # NOTE(review): gas budget is looked up for 'transfer', not 'approve' —
        # presumably a conservative stand-in; confirm against the registry gas table
        source_token_gas = source_token.gas('transfer')

        tx_approve = tx_approve_buildable.buildTransaction({
            'from': self.address,
            'gas': source_token_gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
            })
        return tx_approve


    def transfer(
            self,
            token_address,
            receiver_address,
            value,
            chain_spec,
            ):
        """Create an ERC20 "transfer" transaction

        :param token_address: ERC20 contract address
        :type token_address: str, 0x-hex
        :param receiver_address: Address to send tokens to
        :type receiver_address: str, 0x-hex
        :param value: Amount of tokens to send
        :type value: int
        :param chain_spec: Chain spec
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Unsigned "transfer" transaction in standard Ethereum format
        :rtype: dict
        """
        source_token = CICRegistry.get_address(chain_spec, token_address)
        source_token_contract = source_token.contract
        transfer_buildable = source_token_contract.functions.transfer(
            receiver_address,
            value,
        )
        source_token_gas = source_token.gas('transfer')

        tx_transfer = transfer_buildable.buildTransaction(
            {
            'from': self.address,
            'gas': source_token_gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(),
            })
        return tx_transfer
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_transfer(data):
    """Verifies that a transaction is an "ERC20.transfer" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match ERC20.transfer
    :returns: Parsed parameters (recipient address and amount)
    :rtype: dict
    """
    selector = data[2:10]
    if selector != contract_function_signatures['transfer']:
        raise ValueError('Invalid transfer data ({})'.format(selector))

    # arguments are 32-byte (64 hex char) words; an address occupies the
    # rightmost 20 bytes (40 hex chars) of its word
    args = data[10:]
    return {
        'to': web3.Web3.toChecksumAddress('0x' + args[24:64]),
        'amount': int(args[64:], 16)
        }
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_transferfrom(data):
    """Verifies that a transaction is an "ERC20.transferFrom" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match ERC20.transferFrom
    :returns: Parsed parameters (sender, recipient and amount)
    :rtype: dict
    """
    selector = data[2:10]
    if selector != contract_function_signatures['transferfrom']:
        raise ValueError('Invalid transferFrom data ({})'.format(selector))

    # three 32-byte argument words: from, to, amount; addresses occupy the
    # rightmost 20 bytes (40 hex chars) of their words
    args = data[10:]
    return {
        'from': web3.Web3.toChecksumAddress('0x' + args[24:64]),
        'to': web3.Web3.toChecksumAddress('0x' + args[88:128]),
        'amount': int(args[128:], 16)
        }
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_approve(data):
    """Verifies that a transaction is an "ERC20.approve" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match ERC20.approve
    :returns: Parsed parameters (spender address and amount)
    :rtype: dict
    """
    selector = data[2:10]
    if selector != contract_function_signatures['approve']:
        raise ValueError('Invalid approval data ({})'.format(selector))

    # two 32-byte argument words: spender, amount; the address occupies the
    # rightmost 20 bytes (40 hex chars) of its word
    args = data[10:]
    return {
        'to': web3.Web3.toChecksumAddress('0x' + args[24:64]),
        'amount': int(args[64:], 16)
        }
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def balance(tokens, holder_address, chain_str):
    """Return token balances for a list of tokens for given address

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: List of balances
    :rtype: list of int
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    balances = []
    c = RpcClient(chain_spec)
    for t in tokens:
        o = CICRegistry.get_address(chain_spec, t['address']).contract
        b = o.functions.balanceOf(holder_address).call()
        logg.debug('balance {} for {}: {}'.format(t['address'], holder_address, b))
        balances.append(b)
    # BUGFIX: previously returned `b` (only the last token's balance) instead
    # of the accumulated list promised by the docstring
    return balances
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def transfer(self, tokens, holder_address, receiver_address, value, chain_str):
    """Transfer ERC20 tokens between addresses

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param receiver_address: Token receiver address
    :type receiver_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises TokenCountError: More than one token is passed in tokens list
    :return: Transaction hash for tranfer operation
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    if len(tokens) != 1:
        raise TokenCountError

    chain_spec = ChainSpec.from_chain_str(chain_str)
    queue = self.request.delivery_info['routing_key']

    # retrieve the token interface
    token = tokens[0]
    client = RpcClient(chain_spec, holder_address=holder_address)
    factory = TokenTxFactory(holder_address, client)

    tx_transfer = factory.transfer(token['address'], receiver_address, value, chain_spec)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, cache_task='cic_eth.eth.token.otx_cache_transfer')

    gas_budget = tx_transfer['gas'] * tx_transfer['gasPrice']

    # hand the signed tx to the gas check / dispatcher pipeline
    s = create_check_gas_and_send_task(
            [tx_signed_raw_hex],
            chain_str,
            holder_address,
            gas_budget,
            [tx_hash_hex],
            queue,
            )
    s.apply_async()
    return tx_hash_hex
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def approve(self, tokens, holder_address, spender_address, value, chain_str):
    """Approve ERC20 transfer on behalf of holder address

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param spender_address: Address approved to spend on behalf of holder
    :type spender_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises TokenCountError: More than one token is passed in tokens list
    :return: Transaction hash for tranfer operation
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    if len(tokens) != 1:
        raise TokenCountError

    chain_spec = ChainSpec.from_chain_str(chain_str)

    queue = self.request.delivery_info['routing_key']

    # retrieve the token interface
    t = tokens[0]

    c = RpcClient(chain_spec, holder_address=holder_address)

    txf = TokenTxFactory(holder_address, c)

    tx_transfer = txf.approve(t['address'], spender_address, value, chain_spec)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, cache_task='cic_eth.eth.token.otx_cache_approve')

    gas_budget = tx_transfer['gas'] * tx_transfer['gasPrice']

    # hand the signed tx to the gas check / dispatcher pipeline
    s = create_check_gas_and_send_task(
            [tx_signed_raw_hex],
            chain_str,
            holder_address,
            gas_budget,
            [tx_hash_hex],
            queue,
            )
    s.apply_async()
    return tx_hash_hex
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def resolve_tokens_by_symbol(token_symbols, chain_str):
    """Returns contract addresses of an array of ERC20 token symbols

    :param token_symbols: Token symbols to resolve
    :type token_symbols: list of str
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: Respective token contract addresses
    :rtype: list of str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    return [
        {
            'address': CICRegistry.get_token(chain_spec, token_symbol).address(),
        }
        for token_symbol in token_symbols
        ]
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def otx_cache_transfer(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        ):
    """Generates and commits transaction cache metadata for an ERC20.transfer or ERC20.transferFrom transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    decoded_tx = unpack_signed_raw_tx(raw_bytes, chain_spec.chain_id())
    # cache id is discarded; only the hash is propagated
    (txc, _) = cache_transfer_data(tx_hash_hex, decoded_tx)
    return txc
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def cache_transfer_data(
        tx_hash_hex,
        tx,
        ):
    """Helper function for otx_cache_transfer

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx: Unpacked transaction dict
    :type tx: dict
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    transfer_data = unpack_transfer(tx['data'])
    logg.debug('tx data {}'.format(transfer_data))
    logg.debug('tx {}'.format(tx))

    # tx['to'] is the token contract; it is recorded as both source and
    # destination token for a same-token transfer
    session = SessionBase.create_session()
    cache_entry = TxCache(
            tx_hash_hex,
            tx['from'],
            transfer_data['to'],
            tx['to'],
            tx['to'],
            transfer_data['amount'],
            transfer_data['amount'],
            )
    session.add(cache_entry)
    session.commit()
    entry_id = cache_entry.id
    session.close()
    return (tx_hash_hex, entry_id)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def otx_cache_approve(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        ):
    """Generates and commits transaction cache metadata for an ERC20.approve transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    decoded_tx = unpack_signed_raw_tx(raw_bytes, chain_spec.chain_id())
    # cache id is discarded; only the hash is propagated
    (txc, _) = cache_approve_data(tx_hash_hex, decoded_tx)
    return txc
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def cache_approve_data(
        tx_hash_hex,
        tx,
        ):
    """Helper function for otx_cache_approve

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx: Unpacked transaction dict
    :type tx: dict
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    approve_data = unpack_approve(tx['data'])
    logg.debug('tx data {}'.format(approve_data))
    logg.debug('tx {}'.format(tx))

    # tx['to'] is the token contract; recorded as both source and destination token
    session = SessionBase.create_session()
    cache_entry = TxCache(
            tx_hash_hex,
            tx['from'],
            approve_data['to'],
            tx['to'],
            tx['to'],
            approve_data['amount'],
            approve_data['amount'],
            )
    session.add(cache_entry)
    session.commit()
    entry_id = cache_entry.id
    session.close()
    return (tx_hash_hex, entry_id)
|
681
apps/cic-eth/cic_eth/eth/tx.py
Normal file
681
apps/cic-eth/cic_eth/eth/tx.py
Normal file
@ -0,0 +1,681 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
import requests
|
||||||
|
import web3
|
||||||
|
from cic_registry import zero_address
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .rpc import RpcClient
|
||||||
|
from cic_eth.db import Otx, SessionBase
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.lock import Lock
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
from cic_eth.error import PermanentTxError
|
||||||
|
from cic_eth.error import TemporaryTxError
|
||||||
|
from cic_eth.error import NotLocalTxError
|
||||||
|
from cic_eth.queue.tx import create as queue_create
|
||||||
|
from cic_eth.queue.tx import get_tx
|
||||||
|
from cic_eth.queue.tx import get_nonce_tx
|
||||||
|
from cic_eth.error import OutOfGasError
|
||||||
|
from cic_eth.error import LockedError
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
from cic_eth.eth.task import sign_and_register_tx, create_check_gas_and_send_task
|
||||||
|
from cic_eth.eth.task import sign_tx
|
||||||
|
from cic_eth.eth.nonce import NonceOracle
|
||||||
|
from cic_eth.error import AlreadyFillingGasError
|
||||||
|
from cic_eth.eth.util import tx_hex_string
|
||||||
|
from cic_eth.admin.ctrl import lock_send
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
MAX_NONCE_ATTEMPTS = 3
|
||||||
|
|
||||||
|
|
||||||
|
# TODO this function is too long
@celery_app.task(bind=True, throws=(OutOfGasError,))
def check_gas(self, tx_hashes, chain_str, txs=None, address=None, gas_required=None):
    """Check the gas level of the sender address of a transaction.

    If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser.

    If account balance is sufficient, but level of gas before spend is below "safe" threshold, gas refill is requested, and execution continues normally.

    :param tx_hashes: Transaction hashes due to be submitted
    :type tx_hashes: list of str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param txs: Signed raw transaction data, corresponding to tx_hashes (looked up from queue if not given)
    :type txs: list of str, 0x-hex
    :param address: Sender address
    :type address: str, 0x-hex
    :param gas_required: Gas limit * gas price for transaction; if None, only the safe-threshold check applies
    :type gas_required: int
    :return: Signed raw transaction data list
    :rtype: param txs, unchanged
    """
    # BUGFIX: default was a mutable list ([]) which is appended to below,
    # leaking entries between task invocations; use None sentinel instead
    if txs is None:
        txs = []
    if len(txs) == 0:
        for i in range(len(tx_hashes)):
            o = get_tx(tx_hashes[i])
            txs.append(o['signed_tx'])
            logg.debug('ooooo {}'.format(o))
            if address is None:
                address = o['address']

    chain_spec = ChainSpec.from_chain_str(chain_str)

    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec)

    # TODO: it should not be necessary to pass address explicitly, if not passed should be derived from the tx
    balance = c.w3.eth.getBalance(address)
    logg.debug('check gas txs {}'.format(tx_hashes))
    logg.debug('address {} has gas {} needs {}'.format(address, balance, gas_required))

    # BUGFIX: guard None before comparison; `None > int` raises TypeError in py3
    if gas_required is not None and gas_required > balance:
        s_refill_gas = celery.signature(
                'cic_eth.eth.tx.refill_gas',
                [
                    address,
                    chain_str,
                ],
                queue=queue,
                )
        s_refill_gas.apply_async()
        # park all txs until gas arrives
        wait_tasks = []
        for tx_hash in tx_hashes:
            s = celery.signature(
                    'cic_eth.queue.tx.set_waitforgas',
                    [
                        tx_hash,
                    ],
                    queue=queue,
                    )
            wait_tasks.append(s)
        celery.group(wait_tasks)()
        raise OutOfGasError('need to fill gas, required {}, had {}'.format(gas_required, balance))

    # balance sufficient but below safe threshold: request refill, continue
    safe_gas = c.safe_threshold_amount()
    if balance < safe_gas:
        s_refill_gas = celery.signature(
                'cic_eth.eth.tx.refill_gas',
                [
                    address,
                    chain_str,
                ],
                queue=queue,
                )
        s_refill_gas.apply_async()
        logg.debug('requested refill from {} to {}'.format(c.gas_provider(), address))
    # mark all txs ready for the dispatcher
    ready_tasks = []
    for tx_hash in tx_hashes:
        s = celery.signature(
                'cic_eth.queue.tx.set_ready',
                [
                    tx_hash,
                ],
                queue=queue,
                )
        ready_tasks.append(s)
    logg.debug('tasks {}'.format(ready_tasks))
    celery.group(ready_tasks)()

    return txs
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: chain chainable transactions that use hashes as inputs may be chained to this function to output signed txs instead.
@celery_app.task(bind=True)
def hashes_to_txs(self, tx_hashes):
    """Return a list of raw signed transactions from the local transaction queue corresponding to a list of transaction hashes.

    :param tx_hashes: Transaction hashes
    :type tx_hashes: list of str, 0x-hex
    :raises ValueError: Empty input list
    :returns: Signed raw transactions
    :rtype: list of str, 0x-hex
    """
    if len(tx_hashes) == 0:
        raise ValueError('no transaction to send')

    session = SessionBase.create_session()
    q = session.query(Otx.signed_tx)
    q = q.filter(Otx.tx_hash.in_(tx_hashes))
    tx_tuples = q.all()
    session.close()

    # each result row is a single-column tuple; unwrap to the raw signed tx
    # (replaces the former map() with a local helper; also drops the unused
    # `queue` local that was read from self.request but never used)
    txs = [r[0] for r in tx_tuples]

    return txs
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Move this and send to subfolder submodule
|
||||||
|
class ParityNodeHandler:
|
||||||
|
    def __init__(self, chain_spec, queue):
        """Create a handler bound to a chain and a celery task queue.

        :param chain_spec: Chain spec
        :type chain_spec: cic_registry.chain.ChainSpec
        :param queue: Task queue used for all follow-up celery signatures
        :type queue: str
        """
        self.chain_spec = chain_spec
        # string form is what downstream celery tasks expect
        self.chain_str = str(chain_spec)
        self.queue = queue
|
||||||
|
|
||||||
|
    def handle(self, exception, tx_hash_hex, tx_hex):
        """Dispatch a node send failure to the matching handler method.

        :param exception: Exception raised while submitting the transaction
        :param tx_hash_hex: Transaction hash, 0x-hex
        :param tx_hex: Signed raw transaction, 0x-hex
        :returns: Handler result (async task result, exception class, message with exception appended)
        :rtype: tuple
        """
        meth = self.handle_default
        if isinstance(exception, (ValueError)):
            # s_debug = celery.signature(
            # 'cic_eth.admin.debug.out_tmp',
            # [tx_hash_hex, '{}: {}'.format(tx_hash_hex, exception)],
            # queue=queue,
            # )
            # s_debug.apply_async()
            earg = exception.args[0]
            # NOTE(review): codes appear to be Parity JSONRPC error codes
            # (-32010 transaction error, -32602 invalid params) — confirm
            if earg['code'] == -32010:
                logg.debug('skipping lock for code {}'.format(earg['code']))
                meth = self.handle_invalid_parameters
            elif earg['code'] == -32602:
                meth = self.handle_invalid_encoding
            else:
                meth = self.handle_invalid
        elif isinstance(exception, (requests.exceptions.ConnectionError)):
            meth = self.handle_connection
        (t, e_fn, message) = meth(tx_hash_hex, tx_hex)
        return (t, e_fn, '{} {}'.format(message, exception))
|
||||||
|
|
||||||
|
|
||||||
|
    def handle_connection(self, tx_hash_hex, tx_hex):
        """Handle a node connection failure: mark tx as sent-failed for retry.

        :param tx_hash_hex: Transaction hash, 0x-hex
        :param tx_hex: Signed raw transaction, 0x-hex
        :returns: (async task result, TemporaryTxError, message)
        :rtype: tuple
        """
        # flag the tx as sent with failure so the retry machinery picks it up
        s_set_sent = celery.signature(
            'cic_eth.queue.tx.set_sent_status',
            [
                tx_hash_hex,
                True,
            ],
            queue=self.queue,
            )
        t = s_set_sent.apply_async()
        return (t, TemporaryTxError, 'Sendfail {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))
|
||||||
|
|
||||||
|
|
||||||
|
def handle_invalid_encoding(self, tx_hash_hex, tx_hex):
|
||||||
|
tx_bytes = bytes.fromhex(tx_hex[2:])
|
||||||
|
tx = unpack_signed_raw_tx(tx_bytes, self.chain_spec.chain_id())
|
||||||
|
s_lock = celery.signature(
|
||||||
|
'cic_eth.admin.ctrl.lock_send',
|
||||||
|
[
|
||||||
|
tx_hash_hex,
|
||||||
|
self.chain_str,
|
||||||
|
tx['from'],
|
||||||
|
tx_hash_hex,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_set_reject = celery.signature(
|
||||||
|
'cic_eth.queue.tx.set_rejected',
|
||||||
|
[],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
nonce_txs = get_nonce_tx(tx['nonce'], tx['from'], self.chain_spec.chain_id())
|
||||||
|
attempts = len(nonce_txs)
|
||||||
|
if attempts < MAX_NONCE_ATTEMPTS:
|
||||||
|
logg.debug('nonce {} address {} retries {} < {}'.format(tx['nonce'], tx['from'], attempts, MAX_NONCE_ATTEMPTS))
|
||||||
|
s_resend = celery.signature(
|
||||||
|
'cic_eth.eth.tx.resend_with_higher_gas',
|
||||||
|
[
|
||||||
|
self.chain_str,
|
||||||
|
None,
|
||||||
|
1.01,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_unlock = celery.signature(
|
||||||
|
'cic_eth.admin.ctrl.unlock_send',
|
||||||
|
[
|
||||||
|
self.chain_str,
|
||||||
|
tx['from'],
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_resend.link(s_unlock)
|
||||||
|
s_set_reject.link(s_resend)
|
||||||
|
|
||||||
|
s_lock.link(s_set_reject)
|
||||||
|
t = s_lock.apply_async()
|
||||||
|
return (t, PermanentTxError, 'Reject invalid encoding {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))
|
||||||
|
|
||||||
|
|
||||||
|
def handle_invalid_parameters(self, tx_hash_hex, tx_hex):
|
||||||
|
s_sync = celery.signature(
|
||||||
|
'cic_eth.eth.tx.sync_tx',
|
||||||
|
[
|
||||||
|
tx_hash_hex,
|
||||||
|
self.chain_str,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
t = s_sync.apply_async()
|
||||||
|
return (t, PermanentTxError, 'Reject invalid parameters {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))
|
||||||
|
|
||||||
|
|
||||||
|
def handle_invalid(self, tx_hash_hex, tx_hex):
|
||||||
|
tx_bytes = bytes.fromhex(tx_hex[2:])
|
||||||
|
tx = unpack_signed_raw_tx(tx_bytes, self.chain_spec.chain_id())
|
||||||
|
s_lock = celery.signature(
|
||||||
|
'cic_eth.admin.ctrl.lock_send',
|
||||||
|
[
|
||||||
|
tx_hash_hex,
|
||||||
|
self.chain_str,
|
||||||
|
tx['from'],
|
||||||
|
tx_hash_hex,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_set_reject = celery.signature(
|
||||||
|
'cic_eth.queue.tx.set_rejected',
|
||||||
|
[],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_lock.link(s_set_reject)
|
||||||
|
t = s_lock.apply_async()
|
||||||
|
return (t, PermanentTxError, 'Reject invalid {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))
|
||||||
|
|
||||||
|
|
||||||
|
def handle_default(self, tx_hash_hex, tx_hex):
|
||||||
|
tx_bytes = bytes.fromhex(tx_hex[2:])
|
||||||
|
tx = unpack_signed_raw_tx(tx_bytes, self.chain_spec.chain_id())
|
||||||
|
s_lock = celery.signature(
|
||||||
|
'cic_eth.admin.ctrl.lock_send',
|
||||||
|
[
|
||||||
|
tx_hash_hex,
|
||||||
|
self.chain_str,
|
||||||
|
tx['from'],
|
||||||
|
tx_hash_hex,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_set_fubar = celery.signature(
|
||||||
|
'cic_eth.queue.tx.set_fubar',
|
||||||
|
[],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_lock.link(s_set_fubar)
|
||||||
|
t = s_lock.apply_async()
|
||||||
|
return (t, PermanentTxError, 'Fubar {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def send(self, txs, chain_str):
    """Send transactions to the network.

    If more than one transaction is passed to the task, it will spawn a new send task with the remaining transaction(s) after the first in the list has been processed.

    Updates the outgoing transaction queue entry to SENT on successful send.

    If a temporary error occurs, the queue entry is set to SENDFAIL.

    If a permanent error occurs due to invalid transaction data, queue entry value is set to REJECTED.

    Any other permanent error that isn't explicitly handled will get value FUBAR.

    :param txs: Signed raw transaction data
    :type txs: list of str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :raises TemporaryTxError: If unable to connect to node
    :raises PermanentTxError: If EVM execution fails immediately due to tx input, or if tx contents are invalid.
    :return: transaction hash of sent transaction
    :rtype: str, 0x-hex
    """
    if len(txs) == 0:
        raise ValueError('no transaction to send')

    chain_spec = ChainSpec.from_chain_str(chain_str)

    tx_hex = txs[0]
    logg.debug('send transaction {}'.format(tx_hex))

    # derive the hash locally so failures can be tied to the queue entry
    tx_hash = web3.Web3.keccak(hexstr=tx_hex)
    tx_hash_hex = tx_hash.hex()

    queue = self.request.delivery_info.get('routing_key', None)
    if queue is None:
        # fix: the original passed tx_hash as an extra positional argument
        # instead of formatting it, so the message always rendered a
        # literal '{}'
        logg.debug('send tx {} has no queue'.format(tx_hash))

    c = RpcClient(chain_spec)
    r = None
    try:
        r = c.w3.eth.send_raw_transaction(tx_hex)
    except Exception as e:
        logg.debug('e {}'.format(e))
        # translate node-specific failures into queue status updates and
        # the appropriate exception class to raise (e is rebound to the
        # class by the tuple unpack below)
        raiser = ParityNodeHandler(chain_spec, queue)
        (t, e, m) = raiser.handle(e, tx_hash_hex, tx_hex)
        raise e(m)

    # mark the queue entry SENT (fail=False)
    s_set_sent = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
        [
            tx_hash_hex,
            False
        ],
        queue=queue,
        )
    s_set_sent.apply_async()

    # process any remaining transactions one at a time in follow-up tasks
    tx_tail = txs[1:]
    if len(tx_tail) > 0:
        s = celery.signature(
            'cic_eth.eth.tx.send',
            [tx_tail],
            queue=queue,
            )
        s.apply_async()

    return r.hex()
|
@celery_app.task(bind=True, throws=(AlreadyFillingGasError))
def refill_gas(self, recipient_address, chain_str):
    """Executes a native token transaction to fund the recipient's gas expenditures.

    :param recipient_address: Recipient in need of gas
    :type recipient_address: str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :raises AlreadyFillingGasError: A gas refill transaction for this address is already executing
    :returns: Transaction hash.
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)

    # refuse to queue another refill while one is still pending for the
    # same recipient (status <= 0 means not yet finalized)
    session = SessionBase.create_session()
    q = session.query(Otx.tx_hash)
    q = q.join(TxCache)
    q = q.filter(Otx.status<=0)
    q = q.filter(TxCache.from_value!='0x00')
    q = q.filter(TxCache.recipient==recipient_address)
    c = q.count()
    session.close()
    if c > 0:
        raise AlreadyFillingGasError(recipient_address)

    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec)
    # fix: removed unused local 'clogg' (celery default logger was fetched
    # but never used)
    logg.debug('refill gas from provider address {}'.format(c.gas_provider()))
    default_nonce = c.w3.eth.getTransactionCount(c.gas_provider(), 'pending')
    nonce_generator = NonceOracle(c.gas_provider(), default_nonce)
    nonce = nonce_generator.next()
    gas_price = c.gas_price()
    gas_limit = c.default_gas_limit
    refill_amount = c.refill_amount()
    logg.debug('gas price {} nonce {}'.format(gas_price, nonce))

    # create and sign transaction
    tx_send_gas = {
        'from': c.gas_provider(),
        'to': recipient_address,
        'gas': gas_limit,
        'gasPrice': gas_price,
        'chainId': chain_spec.chain_id(),
        'nonce': nonce,
        'value': refill_amount,
        'data': '',
        }
    logg.debug('txsend_gas {}'.format(tx_send_gas))
    tx_send_gas_signed = c.w3.eth.sign_transaction(tx_send_gas)
    tx_hash = web3.Web3.keccak(hexstr=tx_send_gas_signed['raw'])
    tx_hash_hex = tx_hash.hex()

    # TODO: route this through sign_and_register_tx instead
    logg.debug('adding queue refill gas tx {}'.format(tx_hash_hex))
    queue_create(
        nonce,
        c.gas_provider(),
        tx_hash_hex,
        tx_send_gas_signed['raw'],
        chain_str,
        )

    # cache metadata and flag the queue entry ready, in parallel
    s_tx_cache = celery.signature(
        'cic_eth.eth.tx.cache_gas_refill_data',
        [
            tx_hash_hex,
            tx_send_gas,
        ],
        queue=queue,
        )
    s_status = celery.signature(
        'cic_eth.queue.tx.set_ready',
        [
            tx_hash_hex,
        ],
        queue=queue,
        )
    celery.group(s_tx_cache, s_status)()
    return tx_send_gas_signed['raw']
|
@celery_app.task(bind=True)
def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_factor=1.1):
    """Create a new transaction from an existing one with same nonce and higher gas price.

    :param txold_hash_hex: Transaction to re-create
    :type txold_hash_hex: str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :param gas: Explicitly use the specified gas amount
    :type gas: number
    :param default_factor: Default factor by which to increment the gas price by
    :type default_factor: float
    :raises NotLocalTxError: Transaction does not exist in the local queue
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==txold_hash_hex).first()
    if otx == None:
        session.close()
        raise NotLocalTxError(txold_hash_hex)
    session.close()

    chain_spec = ChainSpec.from_chain_str(chain_str)
    c = RpcClient(chain_spec)

    # rebuild the tx fields from the stored signed raw tx
    tx_signed_raw_bytes = bytes.fromhex(otx.signed_tx[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    logg.debug('otx {} {}'.format(tx, otx.signed_tx))

    queue = self.request.delivery_info['routing_key']

    logg.debug('before {}'.format(tx))
    if gas != None:
        # an explicit caller-supplied gas price overrides the bump logic
        tx['gasPrice'] = gas
    else:
        gas_price = c.gas_price()
        if tx['gasPrice'] > gas_price:
            # network price has dropped below the stale tx's price; bump by
            # the minimum (+1) so the node still accepts the replacement
            logg.warning('Network gas price {} is lower than overdue tx gas price {}'.format(gas_price, tx['gasPrice']))
            #tx['gasPrice'] = int(tx['gasPrice'] * default_factor)
            tx['gasPrice'] += 1
        else:
            # bump by default_factor, but never below the current network price
            new_gas_price = int(tx['gasPrice'] * default_factor)
            if gas_price > new_gas_price:
                tx['gasPrice'] = gas_price
            else:
                tx['gasPrice'] = new_gas_price

    logg.debug('after {}'.format(tx))

    #(tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx, chain_str, queue)
    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str)
    queue_create(
        tx['nonce'],
        tx['from'],
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        )
    # carry the cached metadata over from the tx being replaced
    TxCache.clone(txold_hash_hex, tx_hash_hex)

    s = create_check_gas_and_send_task(
        [tx_signed_raw_hex],
        chain_str,
        tx['from'],
        tx['gasPrice'] * tx['gas'],
        [tx_hash_hex],
        queue=queue,
        )
    s.apply_async()
    return tx_hash_hex
|
@celery_app.task(bind=True, throws=(web3.exceptions.TransactionNotFound,))
def sync_tx(self, tx_hash_hex, chain_str):
    """Reconcile local queue status with the network state of a transaction.

    Marks the queue entry final (mined) when a receipt exists, otherwise
    marks it sent (still in the mempool).

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    """
    queue = self.request.delivery_info['routing_key']

    chain_spec = ChainSpec.from_chain_str(chain_str)
    rpc = RpcClient(chain_spec)

    # raises TransactionNotFound (declared in throws) if the node does not
    # know the hash at all
    tx = rpc.w3.eth.getTransaction(tx_hash_hex)

    try:
        receipt = rpc.w3.eth.getTransactionReceipt(tx_hash_hex)
    except web3.exceptions.TransactionNotFound:
        # known but unmined; no receipt yet
        receipt = None

    if receipt is None:
        logg.debug('sync tx {} mempool'.format(tx_hash_hex))

        s = celery.signature(
            'cic_eth.queue.tx.set_sent_status',
            [
                tx_hash_hex,
            ],
            queue=queue,
            )
    else:
        success = receipt['status'] == 1
        logg.debug('sync tx {} mined block {} success {}'.format(tx_hash_hex, receipt['blockNumber'], success))

        s = celery.signature(
            'cic_eth.queue.tx.set_final_status',
            [
                tx_hash_hex,
                receipt['blockNumber'],
                not success,
            ],
            queue=queue,
            )

    s.apply_async()
|
@celery_app.task(bind=True)
def resume_tx(self, txpending_hash_hex, chain_str):
    """Queue a suspended tranaction for (re)sending

    :param txpending_hash_hex: Transaction hash
    :type txpending_hash_hex: str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :raises NotLocalTxError: Transaction does not exist in the local queue
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)

    # look up the stored signed raw tx for the pending hash
    session = SessionBase.create_session()
    row = session.query(Otx.signed_tx).filter(Otx.tx_hash==txpending_hash_hex).first()
    session.close()
    if row is None:
        raise NotLocalTxError(txpending_hash_hex)

    signed_tx_hex = row[0]
    tx_fields = unpack_signed_raw_tx(bytes.fromhex(signed_tx_hex[2:]), chain_spec.chain_id())

    queue = self.request.delivery_info['routing_key']

    # gas check precedes the actual send; budget is gasPrice * gas limit
    task = create_check_gas_and_send_task(
        [signed_tx_hex],
        chain_str,
        tx_fields['from'],
        tx_fields['gasPrice'] * tx_fields['gas'],
        [txpending_hash_hex],
        queue=queue,
        )
    task.apply_async()
    return txpending_hash_hex
|
@celery_app.task()
def otx_cache_parse_tx(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        ):
    """Generates and commits transaction cache metadata for a gas refill transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    # fix: removed unused RpcClient instantiation; nothing in this task
    # talks to the node
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    (txc, cache_id) = cache_gas_refill_data(tx_hash_hex, tx)
    return txc
|
@celery_app.task()
def cache_gas_refill_data(
        tx_hash_hex,
        tx,
        ):
    """Helper function for otx_cache_parse_tx

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx: Unpacked transaction fields (as returned by unpack_signed_raw_tx)
    :type tx: dict
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    # gas refills carry no token transfer, so both token addresses are the
    # zero address and from/to values are the native value moved
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['to'],
        zero_address,
        zero_address,
        tx['value'],
        tx['value'],
        )

    session = SessionBase.create_session()
    session.add(tx_cache)
    session.commit()
    # capture the generated primary key before closing the session
    cache_id = tx_cache.id
    session.close()
    return (tx_hash_hex, cache_id)
102
apps/cic-eth/cic_eth/eth/util.py
Normal file
102
apps/cic-eth/cic_eth/eth/util.py
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import sha3
|
||||||
|
import web3
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from rlp import decode as rlp_decode
|
||||||
|
from rlp import encode as rlp_encode
|
||||||
|
from eth_keys import KeyAPI
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
field_debugs = [
|
||||||
|
'nonce',
|
||||||
|
'gasPrice',
|
||||||
|
'gas',
|
||||||
|
'to',
|
||||||
|
'value',
|
||||||
|
'data',
|
||||||
|
'v',
|
||||||
|
'r',
|
||||||
|
's',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_signed_raw_tx(tx_raw_bytes, chain_id):
    """Decode an RLP-encoded signed raw transaction into a dict of fields.

    Recovers the sender address from the signature (EIP-155 recovery-byte
    arithmetic when chain_id is non-zero) and computes both the signed and
    unsigned transaction hashes.

    :param tx_raw_bytes: RLP-encoded signed transaction
    :type tx_raw_bytes: bytes
    :param chain_id: Chain id used in the recovery byte calculation
    :type chain_id: number
    :returns: Transaction fields; hashes, data and signature parts as 0x-hex strings
    :rtype: dict
    """
    d = rlp_decode(tx_raw_bytes)

    logg.debug('decoding using chain id {}'.format(chain_id))
    j = 0
    for i in d:
        logg.debug('decoded {}: {}'.format(field_debugs[j], i.hex()))
        j += 1
    # recover the raw recovery byte from v (EIP-155: v = vb + chain_id*2 + 35)
    vb = chain_id
    if chain_id != 0:
        v = int.from_bytes(d[6], 'big')
        vb = v - (chain_id * 2) - 35
    s = b''.join([d[7], d[8], bytes([vb])])
    so = KeyAPI.Signature(signature_bytes=s)

    # hash of the tx exactly as received (signed hash)
    h = sha3.keccak_256()
    h.update(rlp_encode(d))
    signed_hash = h.digest()

    # replace the signature slots per EIP-155 to re-derive the hash that
    # was actually signed (mutates d in place, after signed_hash is taken)
    d[6] = chain_id
    d[7] = b''
    d[8] = b''

    h = sha3.keccak_256()
    h.update(rlp_encode(d))
    unsigned_hash = h.digest()

    p = so.recover_public_key_from_msg_hash(unsigned_hash)
    a = p.to_checksum_address()
    logg.debug('decoded recovery byte {}'.format(vb))
    logg.debug('decoded address {}'.format(a))
    logg.debug('decoded signed hash {}'.format(signed_hash.hex()))
    logg.debug('decoded unsigned hash {}'.format(unsigned_hash.hex()))

    # empty 'to' field (contract creation) decodes to '' -> None
    to = d[3].hex() or None
    if to != None:
        to = web3.Web3.toChecksumAddress('0x' + to)

    return {
        'from': a,
        'nonce': int.from_bytes(d[0], 'big'),
        'gasPrice': int.from_bytes(d[1], 'big'),
        'gas': int.from_bytes(d[2], 'big'),
        'to': to,
        'value': int.from_bytes(d[4], 'big'),
        'data': '0x' + d[5].hex(),
        # NOTE(review): 'v' is set to the chain id, not the transaction's
        # original v value — confirm callers expect this
        'v': chain_id,
        'r': '0x' + s[:32].hex(),
        's': '0x' + s[32:64].hex(),
        'chainId': chain_id,
        'hash': '0x' + signed_hash.hex(),
        'hash_unsigned': '0x' + unsigned_hash.hex(),
        }
|
def unpack_signed_raw_tx_hex(tx_raw_hex, chain_id):
    """Hex-string convenience wrapper for unpack_signed_raw_tx.

    :param tx_raw_hex: RLP-encoded signed transaction, 0x-hex
    :type tx_raw_hex: str
    :param chain_id: Chain id used in the recovery byte calculation
    :type chain_id: number
    :returns: Transaction fields
    :rtype: dict
    """
    raw = bytes.fromhex(tx_raw_hex[2:])
    return unpack_signed_raw_tx(raw, chain_id)
|
# TODO: consider moving tx string representation generation from api_admin to here
|
||||||
|
def tx_string(tx_raw_bytes, chain_id):
    """Render a short human-readable summary of a signed raw transaction.

    :param tx_raw_bytes: RLP-encoded signed transaction
    :type tx_raw_bytes: bytes
    :param chain_id: Chain id used when decoding
    :type chain_id: number
    :returns: Summary with nonce, sender, recipient and hash
    :rtype: str
    """
    fields = unpack_signed_raw_tx(tx_raw_bytes, chain_id)
    summary_parts = [fields[k] for k in ('nonce', 'from', 'to', 'hash')]
    return 'tx nonce {} from {} to {} hash {}'.format(*summary_parts)
|
def tx_hex_string(tx_hex, chain_id):
    """Render a summary of a hex-encoded signed raw transaction.

    The '0x' prefix is optional.

    :param tx_hex: RLP-encoded signed transaction, hex
    :type tx_hex: str
    :param chain_id: Chain id used when decoding
    :type chain_id: number
    :raises ValueError: Input shorter than two characters
    :returns: Summary with nonce, sender, recipient and hash
    :rtype: str
    """
    if len(tx_hex) < 2:
        raise ValueError('invalid data length')
    body = tx_hex[2:] if tx_hex[:2] == '0x' else tx_hex
    return tx_string(bytes.fromhex(body), chain_id)
|
4
apps/cic-eth/cic_eth/queue/__init__.py
Normal file
4
apps/cic-eth/cic_eth/queue/__init__.py
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
from .tx import get_tx
|
654
apps/cic-eth/cic_eth/queue/tx.py
Normal file
654
apps/cic-eth/cic_eth/queue/tx.py
Normal file
@ -0,0 +1,654 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
from sqlalchemy import or_
|
||||||
|
from sqlalchemy import tuple_
|
||||||
|
from sqlalchemy import func
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_eth.db.models.otx import Otx
|
||||||
|
from cic_eth.db.models.otx import OtxStateLog
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.models.lock import Lock
|
||||||
|
from cic_eth.db import SessionBase
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx # TODO: should not be in same sub-path as package that imports queue.tx
|
||||||
|
from cic_eth.error import NotLocalTxError
|
||||||
|
from cic_eth.error import LockedError
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
#logg = celery_app.log.get_default_logger()
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predecessors=True):
    """Create a new transaction queue record.

    :param nonce: Transaction nonce
    :type nonce: int
    :param holder_address: Sender address
    :type holder_address: str, 0x-hex
    :param tx_hash: Transaction hash
    :type tx_hash: str, 0x-hex
    :param signed_tx: Signed raw transaction
    :type signed_tx: str, 0x-hex
    :param chain_str: Chain spec string representation to create transaction for
    :type chain_str: str
    :param obsolete_predecessors: If True, mark earlier unfinalized queue entries with the same sender and nonce as superseded (Default: True)
    :type obsolete_predecessors: boolean
    :raises LockedError: If the queue is locked for the sender address
    :returns: transaction hash
    :rtype: str, 0x-hash
    """
    session = SessionBase.create_session()
    # honor queue-level send locks for this sender before adding anything
    lock = Lock.check_aggregate(chain_str, LockEnum.QUEUE, holder_address, session=session)
    if lock > 0:
        session.close()
        raise LockedError(lock)

    o = Otx.add(
        nonce=nonce,
        address=holder_address,
        tx_hash=tx_hash,
        signed_tx=signed_tx,
        session=session,
        )
    session.flush()

    if obsolete_predecessors:
        # TODO: obsolete previous txs from same holder with same nonce
        q = session.query(Otx)
        q = q.join(TxCache)
        q = q.filter(Otx.nonce==nonce)
        q = q.filter(TxCache.sender==holder_address)
        q = q.filter(Otx.tx_hash!=tx_hash)
        q = q.filter(Otx.status<=StatusEnum.SENT)

        for otx in q.all():
            logg.info('otx {} obsoleted by {}'.format(otx.tx_hash, tx_hash))
            # already-sent entries are cancelled (they may still mine);
            # unsent ones are overridden outright
            if otx.status == StatusEnum.SENT:
                otx.cancel(False, session=session)
            elif otx.status != StatusEnum.OBSOLETED:
                otx.override(session=session)

    session.commit()
    session.close()
    logg.debug('queue created nonce {} from {} hash {}'.format(nonce, holder_address, tx_hash))
    return tx_hash
|
# TODO: Replace set_* with single task for set status
|
||||||
|
@celery_app.task()
def set_sent_status(tx_hash, fail=False):
    """Used to set the status after a send attempt

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :param fail: if True, will set a SENDFAIL status, otherwise a SENT status. (Default: False)
    :type fail: boolean
    :returns: False if the transaction is not known locally, otherwise the transaction hash
    :rtype: str (0x-hex) or boolean
    """
    session = SessionBase.create_session()
    o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if o == None:
        # unknown hashes are logged and skipped rather than raised; a send
        # attempt may concern a transaction never queued locally
        logg.warning('not local tx, skipping {}'.format(tx_hash))
        session.close()
        return False

    if fail:
        o.sendfail(session=session)
    else:
        o.sent(session=session)

    session.commit()
    session.close()

    return tx_hash
|
@celery_app.task()
def set_final_status(tx_hash, block=None, fail=False):
    """Used to set the status of an incoming transaction result.

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :param block: Block number if final status represents a confirmation on the network
    :type block: number
    :param fail: if True, will set a REVERTED status, otherwise a SUCCESS status. (Default: False)
    :type fail: boolean
    :raises NotLocalTxError: If transaction not found in queue.
    """
    session = SessionBase.create_session()
    # fetch nonce and sender up front so competing entries can be
    # cancelled after the final status is recorded
    q = session.query(
            Otx.nonce.label('nonce'),
            TxCache.sender.label('sender'),
            Otx.id.label('otxid'),
            )
    q = q.join(TxCache)
    q = q.filter(Otx.tx_hash==tx_hash)
    o = q.first()

    if o == None:
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))

    session.flush()

    nonce = o.nonce
    sender = o.sender
    otxid = o.otxid

    # re-fetch the full Otx row to apply the state change
    q = session.query(Otx)
    q = q.filter(Otx.tx_hash==tx_hash)
    o = q.first()

    if fail:
        o.minefail(block, session=session)
    else:
        o.success(block, session=session)

    # any other queued tx with the same sender and nonce can no longer
    # mine; cancel them as confirmed-superseded
    q = session.query(Otx)
    q = q.join(TxCache)
    q = q.filter(Otx.nonce==nonce)
    q = q.filter(TxCache.sender==sender)
    q = q.filter(Otx.tx_hash!=tx_hash)

    for otwo in q.all():
        otwo.cancel(True, session=session)

    session.commit()
    session.close()

    return tx_hash
|
@celery_app.task()
def set_cancel(tx_hash, manual=False):
    """Used to set the status when a transaction is cancelled.

    Will set the state to CANCELLED or OVERRIDDEN

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :param manual: If set, status will be OVERRIDDEN. Otherwise CANCELLED.
    :type manual: boolean
    :raises NotLocalTxError: If transaction not found in queue.
    """
    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if otx is None:
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))

    session.flush()

    # manual cancellation is recorded distinctly from automatic obsoletion
    if manual:
        otx.override(session=session)
    else:
        otx.cancel(session=session)

    session.commit()
    session.close()

    return tx_hash
||||||
|
|
||||||
|
@celery_app.task()
def set_rejected(tx_hash):
    """Used to set the status when the node rejects sending a transaction to network

    Will set the state to REJECTED

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :raises NotLocalTxError: If transaction not found in queue.
    """
    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if otx is None:
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))

    session.flush()

    otx.reject(session=session)
    session.commit()
    session.close()

    return tx_hash
|
@celery_app.task()
def set_fubar(tx_hash):
    """Used to set the status when an unexpected error occurs.

    Will set the state to FUBAR

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :raises NotLocalTxError: If transaction not found in queue.
    """
    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if otx is None:
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))

    session.flush()

    otx.fubar(session=session)
    session.commit()
    session.close()

    return tx_hash
|
@celery_app.task()
def set_ready(tx_hash):
    """Used to mark a transaction as ready to be sent to network

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :raises NotLocalTxError: If transaction not found in queue.
    """
    session = SessionBase.create_session()
    o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if o == None:
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))
    session.flush()

    # first-time sends transition from gas-wait/pending; any other state
    # counts as a retry
    if o.status == StatusEnum.WAITFORGAS or o.status == StatusEnum.PENDING:
        o.readysend(session=session)
    else:
        o.retry(session=session)

    logg.debug('ot otx otx {} {}'.format(tx_hash, o))

    session.add(o)
    session.commit()
    session.close()

    return tx_hash
|
@celery_app.task()
def set_waitforgas(tx_hash):
    """Used to set the status when a transaction must be deferred due to gas refill

    Will set the state to WAITFORGAS

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :raises NotLocalTxError: If transaction not found in queue.
    """
    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if otx is None:
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))

    session.flush()

    otx.waitforgas(session=session)
    session.commit()
    session.close()

    return tx_hash
|
@celery_app.task()
def get_state_log(tx_hash):
    """Retrieve the recorded status change history for a queued transaction.

    :param tx_hash: Transaction hash of record to look up
    :type tx_hash: str, 0x-hex
    :returns: Status log entries, oldest first, as (date, status) tuples
    :rtype: list of tuples
    """
    session = SessionBase.create_session()

    q = session.query(OtxStateLog)
    q = q.join(Otx)
    q = q.filter(Otx.tx_hash==tx_hash)
    q = q.order_by(OtxStateLog.date.asc())
    logs = [(entry.date, entry.status,) for entry in q.all()]

    session.close()

    return logs
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def get_tx_cache(tx_hash):
    """Returns an aggregate dictionary of outgoing transaction data and metadata

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :raises NotLocalTxError: If transaction not found in queue.
    :returns: Transaction data
    :rtype: dict
    """
    session = SessionBase.create_session()
    q = session.query(Otx)
    q = q.filter(Otx.tx_hash==tx_hash)
    otx = q.first()

    if otx is None:
        session.close()
        raise NotLocalTxError(tx_hash)

    session.flush()

    q = session.query(TxCache)
    q = q.filter(TxCache.otx_id==otx.id)
    txc = q.first()

    if txc is None:
        # bug fix: without this check a missing cache row surfaced as an
        # AttributeError on txc.values() instead of a queue error
        session.close()
        raise NotLocalTxError(tx_hash)

    values = txc.values()
    tx = {
        'tx_hash': otx.tx_hash,
        'signed_tx': otx.signed_tx,
        'nonce': otx.nonce,
        'status': StatusEnum(otx.status).name,
        'status_code': otx.status,
        'source_token': txc.source_token_address,
        'destination_token': txc.destination_token_address,
        'sender': txc.sender,
        'recipient': txc.recipient,
        'from_value': values[0],
        'to_value': values[1],
        'date_created': txc.date_created,
        'date_updated': txc.date_updated,
        'date_checked': txc.date_checked,
        }

    # close only after all attributes have been read, so no detached-instance
    # access can occur
    session.close()

    return tx
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def get_lock(address=None):
    """Retrieve all active locks

    If address is set, the query will look up the lock for the specified address only. A list of zero or one elements is returned, depending on whether a lock is set or not.

    :param address: Get lock for only the specified address
    :type address: str, 0x-hex
    :returns: List of locks
    :rtype: list of dicts
    """
    session = SessionBase.create_session()
    q = session.query(
            Lock.date_created,
            Lock.address,
            Lock.flags,
            Otx.tx_hash,
            )
    q = q.join(Otx, isouter=True)
    if address is not None:
        q = q.filter(Lock.address==address)
    else:
        q = q.order_by(Lock.date_created.asc())

    locks = []
    for lock in q.all():
        # row tuple order follows the query column list above
        locks.append({
            'date': lock[0],
            'address': lock[1],
            'tx_hash': lock[3],
            'flags': lock[2],
            })
    session.close()

    return locks
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def get_tx(tx_hash):
    """Retrieve a transaction queue record by transaction hash

    :param tx_hash: Transaction hash of record to modify
    :type tx_hash: str, 0x-hex
    :raises NotLocalTxError: If transaction not found in queue.
    :returns: nonce, address and signed_tx (raw signed transaction)
    :rtype: dict
    """
    session = SessionBase.create_session()
    tx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
    if tx is None:
        # bug fix: the session was previously leaked on this error path
        session.close()
        raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))

    o = {
        'otx_id': tx.id,
        'nonce': tx.nonce,
        'signed_tx': tx.signed_tx,
        'status': tx.status,
        }
    logg.debug('get tx {}'.format(o))
    session.close()
    return o
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def get_nonce_tx(nonce, sender, chain_id):
    """Retrieve all transactions for address with specified nonce

    :param nonce: Nonce
    :type nonce: number
    :param sender: Ethereum address of transaction sender
    :type sender: str, 0x-hex
    :param chain_id: Numeric chain id to use to parse signed transaction data
    :type chain_id: number
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    session = SessionBase.create_session()
    q = session.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.sender==sender)
    q = q.filter(Otx.nonce==nonce)

    txs = {}
    for r in q.all():
        tx_signed_bytes = bytes.fromhex(r.signed_tx[2:])
        tx = unpack_signed_raw_tx(tx_signed_bytes, chain_id)
        # double-check the decoded sender against the cache row's sender column
        if sender is None or tx['from'] == sender:
            txs[r.tx_hash] = r.signed_tx

    session.close()

    return txs
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: pass chain spec instead of chain id
def get_paused_txs(status=None, sender=None, chain_id=0):
    """Returns not finalized transactions that have been attempted sent without success.

    :param status: If set, will return transactions with this local queue status only
    :type status: cic_eth.db.enum.StatusEnum
    :param sender: Sender address to return transactions for
    :type sender: str, 0x-hex
    :param chain_id: Numeric chain id to use to parse signed transaction data
    :type chain_id: number
    :raises ValueError: Status is finalized, sent or never attempted sent
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    session = SessionBase.create_session()
    q = session.query(Otx)

    if status is not None:
        if status == StatusEnum.PENDING or status >= StatusEnum.SENT:
            # close before raising so the session is not leaked
            session.close()
            raise ValueError('not a valid paused tx value: {}'.format(status))
        q = q.filter(Otx.status==status)
        q = q.join(TxCache)
    else:
        q = q.filter(Otx.status>StatusEnum.PENDING)
        q = q.filter(Otx.status<StatusEnum.SENT)
        if sender is not None:
            # bug fix: this branch previously filtered on TxCache.sender without
            # joining TxCache, producing a cartesian product
            q = q.join(TxCache)

    if sender is not None:
        q = q.filter(TxCache.sender==sender)

    txs = {}

    for r in q.all():
        tx_signed_bytes = bytes.fromhex(r.signed_tx[2:])
        tx = unpack_signed_raw_tx(tx_signed_bytes, chain_id)
        if sender is None or tx['from'] == sender:
            txs[r.tx_hash] = r.signed_tx

    session.close()

    return txs
|
||||||
|
|
||||||
|
|
||||||
|
def get_status_tx(status, before=None, limit=0):
    """Retrieve transaction with a specific queue status.

    :param status: Status to match transactions with
    :type status: str
    :param before: If set, return only transactions older than the timestamp
    :type before: datetime.datetime
    :param limit: Limit amount of returned transactions
    :type limit: number
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    txs = {}
    session = SessionBase.create_session()
    q = session.query(Otx)
    q = q.join(TxCache)
    if before is not None:
        # bug fix: comparing against None translated to SQL NULL, which matched
        # no rows at all when the default before=None was used
        q = q.filter(TxCache.date_updated<before)
    q = q.filter(Otx.status==status)
    for o in q.all():
        if limit > 0 and len(txs) == limit:
            break
        txs[o.tx_hash] = o.signed_tx
    session.close()
    return txs
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: move query to model
def get_upcoming_tx(status=StatusEnum.READYSEND, recipient=None, before=None, chain_id=0):
    """Returns the next pending transaction, specifically the transaction with the lowest nonce, for every recipient that has pending transactions.

    Will omit addresses that have the LockEnum.SEND bit in Lock set.

    (TODO) Will not return any rows if LockEnum.SEND bit in Lock is set for zero address.

    :param status: Defines the status used to filter as upcoming.
    :type status: cic_eth.db.enum.StatusEnum
    :param recipient: Ethereum address of recipient to return transaction for
    :type recipient: str, 0x-hex
    :param before: Only return transactions if their modification date is older than the given timestamp
    :type before: datetime.datetime
    :param chain_id: Chain id to use to parse signed transaction data
    :type chain_id: number
    :raises ValueError: Status is finalized, sent or never attempted sent
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    session = SessionBase.create_session()
    q = session.query(
            TxCache.sender,
            func.min(Otx.nonce).label('nonce'),
            )
    q = q.join(TxCache)
    q = q.join(Lock, isouter=True)
    # == None is deliberate: SQLAlchemy renders it as "IS NULL" for senders with no lock row
    q = q.filter(or_(Lock.flags==None, Lock.flags.op('&')(LockEnum.SEND.value)==0))

    if status >= StatusEnum.SENT:
        # bug fix: the message previously formatted the undefined name 's',
        # raising NameError instead of ValueError; session was also leaked
        session.close()
        raise ValueError('not a valid non-final tx value: {}'.format(status))
    q = q.filter(Otx.status==status)

    if recipient is not None:
        q = q.filter(TxCache.recipient==recipient)

    q = q.group_by(TxCache.sender)

    txs = {}

    results = q.all()
    for r in results:
        # fetch the actual Otx row for this (sender, min-nonce) pair
        iq = session.query(Otx)
        iq = iq.join(TxCache)
        iq = iq.filter(TxCache.sender==r.sender)
        iq = iq.filter(Otx.nonce==r.nonce)

        if before is not None:
            iq = iq.filter(TxCache.date_checked<before)

        iq = iq.order_by(TxCache.date_created.desc())

        o = iq.first()

        # TODO: audit; should this be possible if a row is found in the initial query? If not, at a minimum log error.
        if o is None:
            continue

        tx_signed_bytes = bytes.fromhex(o.signed_tx[2:])
        # decode to validate the payload; the decoded value itself is unused here
        tx = unpack_signed_raw_tx(tx_signed_bytes, chain_id)
        txs[o.tx_hash] = o.signed_tx

        cq = session.query(TxCache)
        cq = cq.filter(TxCache.otx_id==o.id)
        txc = cq.first()

        logg.debug('oooo {}'.format(txc))
        # stamp the cache row so the next poll can filter on date_checked
        txc.date_checked = datetime.datetime.now()
        session.add(txc)
        session.commit()

    session.close()

    return txs
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task()
def get_account_tx(address, as_sender=True, as_recipient=True, counterpart=None):
    """Returns all local queue transactions for a given Ethereum address

    :param address: Ethereum address
    :type address: str, 0x-hex
    :param as_sender: If False, will omit transactions where address is sender
    :type as_sender: bool
    :param as_recipient: If False, will omit transactions where address is recipient
    :type as_recipient: bool
    :param counterpart: Only return transactions where this Ethereum address is the other end of the transaction (not in use)
    :type counterpart: str, 0x-hex
    :raises ValueError: If address is set to be neither sender nor recipient
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    if not as_sender and not as_recipient:
        raise ValueError('at least one of as_sender and as_recipient must be True')

    session = SessionBase.create_session()
    query = session.query(Otx).join(TxCache)

    # narrow by the requested role(s) of the address
    if as_sender and as_recipient:
        query = query.filter(or_(TxCache.sender==address, TxCache.recipient==address))
    elif as_sender:
        query = query.filter(TxCache.sender==address)
    else:
        query = query.filter(TxCache.recipient==address)

    query = query.order_by(Otx.nonce.asc(), Otx.date_created.asc())
    txs = {row.tx_hash: row.signed_tx for row in query.all()}

    session.close()

    return txs
|
||||||
|
|
||||||
|
|
||||||
|
|
148
apps/cic-eth/cic_eth/runnable/ctrl.py
Normal file
148
apps/cic-eth/cic_eth/runnable/ctrl.py
Normal file
@ -0,0 +1,148 @@
|
|||||||
|
# standard imports
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
import web3
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
from cic_registry import zero_address
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.api import AdminApi
|
||||||
|
from cic_eth.eth.rpc import RpcClient
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
logging.getLogger('web3').setLevel(logging.WARNING)
|
||||||
|
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||||
|
|
||||||
|
|
||||||
|
default_abi_dir = '/usr/share/local/cic/solidity/abi'
|
||||||
|
default_config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser()
|
||||||
|
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||||
|
argparser.add_argument('-r', '--registry-address', type=str, help='CIC registry address')
|
||||||
|
argparser.add_argument('-f', '--format', dest='f', default='terminal', type=str, help='Output format')
|
||||||
|
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||||
|
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||||
|
|
||||||
|
def process_lock_args(argparser):
    """Attach the positional arguments shared by the lock and unlock subcommands."""
    argparser.add_argument('flags', type=str, help='Flags to manipulate')
    argparser.add_argument('address', nargs='?', type=str, default=zero_address, help='Ethereum address to unlock,')
|
||||||
|
|
||||||
|
sub = argparser.add_subparsers()
|
||||||
|
sub.dest = "command"
|
||||||
|
sub_lock = sub.add_parser('lock', help='Set or reset locks')
|
||||||
|
sub_unlock = sub.add_parser('unlock', help='Set or reset locks')
|
||||||
|
process_lock_args(sub_lock)
|
||||||
|
process_lock_args(sub_unlock)
|
||||||
|
|
||||||
|
args = argparser.parse_args()
|
||||||
|
|
||||||
|
if args.v == True:
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
elif args.vv == True:
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
config_dir = os.path.join(args.c)
|
||||||
|
os.makedirs(config_dir, 0o777, True)
|
||||||
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
args_override = {
|
||||||
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
}
|
||||||
|
# override args
|
||||||
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
|
re_websocket = re.compile('^wss?://')
|
||||||
|
re_http = re.compile('^https?://')
|
||||||
|
blockchain_provider = config.get('ETH_PROVIDER')
|
||||||
|
if re.match(re_websocket, blockchain_provider) != None:
|
||||||
|
blockchain_provider = web3.Web3.WebsocketProvider(blockchain_provider)
|
||||||
|
elif re.match(re_http, blockchain_provider) != None:
|
||||||
|
blockchain_provider = web3.Web3.HTTPProvider(blockchain_provider)
|
||||||
|
else:
|
||||||
|
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
||||||
|
|
||||||
|
def web3_constructor():
|
||||||
|
w3 = web3.Web3(blockchain_provider)
|
||||||
|
return (blockchain_provider, w3)
|
||||||
|
RpcClient.set_constructor(web3_constructor)
|
||||||
|
|
||||||
|
|
||||||
|
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
|
|
||||||
|
queue = args.q
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
chain_str = str(chain_spec)
|
||||||
|
c = RpcClient(chain_spec)
|
||||||
|
admin_api = AdminApi(c)
|
||||||
|
|
||||||
|
|
||||||
|
def lock_names_to_flag(s):
    """Translate a comma-separated list of lock names into a combined LockEnum bit value."""
    value = 0
    for name in s.split(','):
        value |= getattr(LockEnum, name.upper())
    return value
|
||||||
|
|
||||||
|
# TODO: move each command to submodule
def _submit_lock_task(task_name, action):
    """Validate the CLI address and dispatch a lock-control celery task.

    Deduplicates the previously copy-pasted lock/unlock branches.

    :param task_name: Fully qualified celery task name to invoke
    :param action: Action label used in the debug log line ('lock' or 'unlock')
    :raises ValueError: If the address argument is not a valid checksum address
    """
    flags = lock_names_to_flag(args.flags)
    if not web3.Web3.isChecksumAddress(args.address):
        raise ValueError('Invalid checksum address {}'.format(args.address))

    s = celery.signature(
        task_name,
        [
            None,
            chain_str,
            args.address,
            flags,
        ],
        queue=queue,
    )
    t = s.apply_async()
    logg.debug('{} {} on {} task {}'.format(action, flags, args.address, t))


def main():
    """Entry point: dispatch the subcommand parsed from the command line."""
    if args.command == 'unlock':
        _submit_lock_task('cic_eth.admin.ctrl.unlock', 'unlock')

    if args.command == 'lock':
        _submit_lock_task('cic_eth.admin.ctrl.lock', 'lock')


if __name__ == '__main__':
    main()
|
162
apps/cic-eth/cic_eth/runnable/dispatcher.py
Normal file
162
apps/cic-eth/cic_eth/runnable/dispatcher.py
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
import web3
|
||||||
|
from web3 import HTTPProvider, WebsocketProvider
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_eth
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.db import SessionBase
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
from cic_eth.db import dsn_from_config
|
||||||
|
from cic_eth.queue.tx import get_upcoming_tx
|
||||||
|
from cic_eth.admin.ctrl import lock_send
|
||||||
|
from cic_eth.sync.error import LoopDone
|
||||||
|
from cic_eth.eth.tx import send as task_tx_send
|
||||||
|
from cic_eth.error import PermanentTxError
|
||||||
|
from cic_eth.error import TemporaryTxError
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx_hex
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
||||||
|
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
||||||
|
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
||||||
|
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
||||||
|
|
||||||
|
|
||||||
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
|
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||||
|
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||||
|
args = argparser.parse_args(sys.argv[1:])
|
||||||
|
|
||||||
|
if args.v == True:
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
elif args.vv == True:
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
config_dir = os.path.join(args.c)
|
||||||
|
os.makedirs(config_dir, 0o777, True)
|
||||||
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
# override args
|
||||||
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
|
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||||
|
|
||||||
|
queue = args.q
|
||||||
|
|
||||||
|
dsn = dsn_from_config(config)
|
||||||
|
SessionBase.connect(dsn)
|
||||||
|
|
||||||
|
|
||||||
|
re_websocket = re.compile('^wss?://')
|
||||||
|
re_http = re.compile('^https?://')
|
||||||
|
blockchain_provider = config.get('ETH_PROVIDER')
|
||||||
|
if re.match(re_websocket, blockchain_provider) != None:
|
||||||
|
blockchain_provider = WebsocketProvider(blockchain_provider)
|
||||||
|
elif re.match(re_http, blockchain_provider) != None:
|
||||||
|
blockchain_provider = HTTPProvider(blockchain_provider)
|
||||||
|
else:
|
||||||
|
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
||||||
|
|
||||||
|
def web3_constructor():
|
||||||
|
w3 = web3.Web3(blockchain_provider)
|
||||||
|
return (blockchain_provider, w3)
|
||||||
|
RpcClient.set_constructor(web3_constructor)
|
||||||
|
|
||||||
|
run = True
|
||||||
|
|
||||||
|
|
||||||
|
class DispatchSyncer:
    """Polling worker that picks up queued transactions in READYSEND state and
    submits each one through a celery chain of lock check followed by network send.
    """

    def __init__(self, chain_spec):
        # chain spec of the network the transactions are destined for
        self.chain_spec = chain_spec
        # numeric chain id, used for parsing signed transaction data
        self.chain_id = chain_spec.chain_id()


    def chain(self):
        """Return the chain spec this syncer operates on."""
        return self.chain_spec


    def process(self, w3, txs):
        """Dispatch a batch of signed transactions.

        For each raw signed transaction, chains a
        'cic_eth.admin.ctrl.check_lock' task (guarding on LockEnum.QUEUE for the
        sender) into a 'cic_eth.eth.tx.send' task on the module-level queue.

        :param w3: Web3 connection (not used directly here; kept for interface parity)
        :param txs: Transactions, transaction hash as key, signed raw transaction as value
        """
        c = len(txs.keys())
        logg.debug('processing {} txs {}'.format(c, list(txs.keys())))
        chain_str = str(self.chain_spec)
        for k in txs.keys():
            tx_raw = txs[k]
            # decode to recover the sender address for the lock check
            tx = unpack_signed_raw_tx_hex(tx_raw, self.chain_spec.chain_id())

            s_check = celery.signature(
                'cic_eth.admin.ctrl.check_lock',
                [
                    [tx_raw],
                    chain_str,
                    LockEnum.QUEUE,
                    tx['from'],
                ],
                queue=queue,
            )
            s_send = celery.signature(
                'cic_eth.eth.tx.send',
                [
                    chain_str,
                ],
                queue=queue,
            )
            # send only runs if the lock check passes
            s_check.link(s_send)
            t = s_check.apply_async()


    def loop(self, w3, interval):
        """Poll the queue for READYSEND transactions and dispatch them, sleeping
        `interval` seconds between iterations; runs until the module-level `run`
        flag is cleared.

        :param w3: Web3 connection, passed through to process()
        :param interval: Seconds to sleep between polls
        """
        while run:
            txs = {}
            typ = StatusEnum.READYSEND
            utxs = get_upcoming_tx(typ, chain_id=self.chain_id)
            for k in utxs.keys():
                txs[k] = utxs[k]
            self.process(w3, txs)

            time.sleep(interval)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Dispatcher entry point: connect to the chain, initialize the contract
    registry, then run the dispatch loop until a LoopDone is raised.
    """

    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
    c = RpcClient(chain_spec)

    # registry needs a live web3 connection and the ABI search path
    CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
    CICRegistry.add_path(config.get('ETH_ABI_DIR'))

    syncer = DispatchSyncer(chain_spec)
    try:
        syncer.loop(c.w3, float(config.get('DISPATCHER_LOOP_INTERVAL')))
    except LoopDone as e:
        sys.stderr.write("dispatcher done at block {}\n".format(e))

    sys.exit(0)


if __name__ == '__main__':
    main()
|
410
apps/cic-eth/cic_eth/runnable/manager.py
Normal file
410
apps/cic-eth/cic_eth/runnable/manager.py
Normal file
@ -0,0 +1,410 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
import rlp
|
||||||
|
import web3
|
||||||
|
from web3 import HTTPProvider, WebsocketProvider
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
from cic_registry import zero_address
|
||||||
|
from cic_registry.chain import ChainRegistry
|
||||||
|
from cic_registry.error import UnknownContractError
|
||||||
|
from cic_bancor.bancor import BancorRegistryClient
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
import cic_eth
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.db import SessionBase
|
||||||
|
from cic_eth.db import Otx
|
||||||
|
from cic_eth.db import TxConvertTransfer
|
||||||
|
from cic_eth.db.models.tx import TxCache
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.db import dsn_from_config
|
||||||
|
from cic_eth.queue.tx import get_paused_txs
|
||||||
|
from cic_eth.sync import Syncer
|
||||||
|
from cic_eth.sync.error import LoopDone
|
||||||
|
from cic_eth.db.error import UnknownConvertError
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||||
|
from cic_eth.sync.backend import SyncerBackend
|
||||||
|
from cic_eth.eth.token import unpack_transfer
|
||||||
|
from cic_eth.eth.token import unpack_transferfrom
|
||||||
|
from cic_eth.eth.account import unpack_gift
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
||||||
|
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
||||||
|
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
||||||
|
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
||||||
|
|
||||||
|
|
||||||
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||||
|
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
|
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||||
|
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||||
|
argparser.add_argument('mode', type=str, help='sync mode: (head|history)', default='head')
|
||||||
|
args = argparser.parse_args(sys.argv[1:])
|
||||||
|
|
||||||
|
if args.v == True:
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
elif args.vv == True:
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
config_dir = os.path.join(args.c)
|
||||||
|
os.makedirs(config_dir, 0o777, True)
|
||||||
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
# override args
|
||||||
|
args_override = {
|
||||||
|
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
}
|
||||||
|
config.dict_override(args_override, 'cli flag')
|
||||||
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
|
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||||
|
|
||||||
|
queue = args.q
|
||||||
|
|
||||||
|
dsn = dsn_from_config(config)
|
||||||
|
SessionBase.connect(dsn)
|
||||||
|
|
||||||
|
# TODO: There is too much code in this file, split it up
|
||||||
|
|
||||||
|
transfer_callbacks = []
|
||||||
|
for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
|
||||||
|
task_split = cb.split(':')
|
||||||
|
task_queue = queue
|
||||||
|
if len(task_split) > 1:
|
||||||
|
task_queue = task_split[0]
|
||||||
|
task_pair = (task_split[1], task_queue)
|
||||||
|
transfer_callbacks.append(task_pair)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: move to contract registry
|
||||||
|
__convert_log_hash = '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095' # keccak256(Conversion(address,address,address,uint256,uint256,address)
|
||||||
|
__account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd' # keccak256(AccountAdded(address,uint256))
|
||||||
|
|
||||||
|
__transfer_method_signature = '0xa9059cbb' # keccak256(transfer(address,uint256))
|
||||||
|
__transferfrom_method_signature = '0x23b872dd' # keccak256(transferFrom(address,address,uint256))
|
||||||
|
__giveto_method_signature = '0x63e4bff4' # keccak256(giveTo(address))
|
||||||
|
|
||||||
|
# TODO: move to bancor package
def parse_convert_log(w3, entry):
    """Decode a bancor Conversion event log entry.

    :param w3: Web3 connection, used for address checksumming
    :param entry: Event log entry whose data field holds the conversion payload
    :returns: dict with from_amount, to_amount and checksummed holder_address
    :rtype: dict
    """
    payload = entry.data[2:]
    parsed = {
        'from_amount': int(payload[:64], 16),
        'to_amount': int(payload[64:128], 16),
        'holder_address': w3.toChecksumAddress('0x' + payload[-40:])
    }
    logg.debug('parsed convert log {}'.format(parsed))
    return parsed
|
||||||
|
|
||||||
|
|
||||||
|
def registration_filter(w3, tx, rcpt, chain_spec):
    """Syncer filter: watch receipt logs for AccountRegistry ``AccountAdded``
    events and queue a token gift task for each newly registered address.

    :param w3: Web3 instance (unused, kept for filter signature parity)
    :param tx: transaction object
    :param rcpt: transaction receipt whose logs are scanned
    :param chain_spec: chain spec of the network the tx was mined on
    """
    # (removed unused local ``registered_address`` — it was assigned and never read)
    for l in rcpt['logs']:
        event_topic_hex = l['topics'][0].hex()
        if event_topic_hex == __account_registry_add_log_hash:
            # the address is right-aligned in the 32-byte topic word
            address_bytes = l.topics[1][32-20:]
            address = web3.Web3.toChecksumAddress(address_bytes.hex())
            logg.debug('request token gift to {}'.format(address))
            s = celery.signature(
                'cic_eth.eth.account.gift',
                [
                    address,
                    str(chain_spec),
                ],
                queue=queue,
            )
            s.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
def convert_filter(w3, tx, rcpt, chain_spec):
    """Syncer filter: detect a Bancor ``Conversion`` event in the receipt and
    queue a transfer of the converted tokens to the recipient recorded in the
    local convert-transfer cache.

    :param w3: Web3 instance
    :param tx: transaction object
    :param rcpt: transaction receipt whose logs are scanned
    :param chain_spec: chain spec of the network the tx was mined on
    :returns: celery AsyncResult of the transfer task, or None if the receipt
        held no usable conversion event
    """
    destination_token_address = None
    recipient_address = None
    # (removed unused local ``amount`` — it was assigned and never read)
    for l in rcpt['logs']:
        event_topic_hex = l['topics'][0].hex()
        if event_topic_hex == __convert_log_hash:
            tx_hash_hex = tx['hash'].hex()
            try:
                convert_transfer = TxConvertTransfer.get(tx_hash_hex)
            except UnknownConvertError:
                logg.warning('skipping unknown convert tx {}'.format(tx_hash_hex))
                continue
            if convert_transfer.transfer_tx_hash != None:
                # bugfix: log message referenced nonexistent attribute
                # ``transfer_hash``; the attribute checked above is ``transfer_tx_hash``
                logg.warning('convert tx {} cache record already has transfer hash {}, skipping'.format(tx_hash_hex, convert_transfer.transfer_tx_hash))
                continue
            recipient_address = convert_transfer.recipient_address
            # bugfix: format string was missing the second placeholder
            logg.debug('found convert event {} recipient {}'.format(tx_hash_hex, recipient_address))
            # bugfix: parse_convert_log is defined as (w3, entry); the call was
            # missing the w3 argument and would raise TypeError
            r = parse_convert_log(w3, l)
            destination_token_address = l['topics'][3][-20:]

    if destination_token_address == zero_address or destination_token_address == None:
        return None

    destination_token_address_hex = destination_token_address.hex()
    s = celery.signature(
        'cic_eth.eth.bancor.transfer_converted',
        [
            [{
                'address': w3.toChecksumAddress(destination_token_address_hex),
            }],
            r['holder_address'],
            recipient_address,
            r['to_amount'],
            tx_hash_hex,
            str(chain_spec),
        ],
        queue=queue,
    )
    logg.info('sending tx signature {}'.format(s))
    t = s.apply_async()
    logg.debug('submitted transfer after convert task uuid {} {}'.format(t, t.successful()))
    return t
|
||||||
|
|
||||||
|
|
||||||
|
def tx_filter(w3, tx, rcpt, chain_spec):
    """Syncer filter: if the mined tx exists in the local outgoing-tx queue,
    dispatch a task that records its final status (success or failure).

    :returns: celery AsyncResult of the status task, or None if the tx is not
        known locally
    """
    tx_hash_hex = tx.hash.hex()
    otx = Otx.load(tx_hash_hex)
    if otx is None:
        logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
        return None
    logg.info('otx found {}'.format(otx.tx_hash))
    status_signature = celery.signature(
        'cic_eth.queue.tx.set_final_status',
        [tx_hash_hex, rcpt.blockNumber, rcpt.status == 0],
        queue=queue,
    )
    return status_signature.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: replace with registry call instead
|
||||||
|
def get_token_symbol(w3, address):
    """Call ``symbol()`` on the ERC20 contract at ``address``."""
    logg.warning('token verification missing')
    erc20 = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address)
    return erc20.functions.symbol().call()
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: replace with registry call instead
|
||||||
|
def get_token_decimals(w3, address):
    """Call ``decimals()`` on the ERC20 contract at ``address``."""
    logg.warning('token verification missing')
    erc20 = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address)
    return erc20.functions.decimals().call()
|
||||||
|
|
||||||
|
|
||||||
|
def callbacks_filter(w3, tx, rcpt, chain_spec):
    """Syncer filter: recognize token transfer style transactions (transfer,
    transferFrom, giveTo) and notify every registered transfer callback task
    with a normalized transfer record.

    :param w3: Web3 instance, used for token symbol/decimals lookups
    :param tx: transaction object; its input data is inspected
    :param rcpt: transaction receipt; logs and status are inspected
    :param chain_spec: chain spec of the network the tx was mined on
    """
    transfer_data = None
    # a method call needs '0x' + 8 hex chars of selector; anything shorter
    # cannot match any of the signatures below
    if len(tx.input) < 10:
        logg.debug('callbacks filter data length not sufficient for method signature in tx {}, skipping'.format(tx['hash']))
        return

    logg.debug('checking callbacks filter input {}'.format(tx.input[:10]))

    transfer_type = 'transfer'
    method_signature = tx.input[:10]
    if method_signature == __transfer_method_signature:
        transfer_data = unpack_transfer(tx.input)
        transfer_data['from'] = tx['from']
        transfer_data['token_address'] = tx['to']
    elif method_signature == __transferfrom_method_signature:
        transfer_type = 'transferfrom'
        transfer_data = unpack_transferfrom(tx.input)
        transfer_data['token_address'] = tx['to']
    elif method_signature == __giveto_method_signature:
        transfer_type = 'tokengift'
        transfer_data = unpack_gift(tx.input)
        # amount and token address come from the receipt logs, not the input
        # NOTE(review): the bare hex constant below appears to be the gift
        # event topic -- confirm and consider hoisting to a named constant
        for l in rcpt.logs:
            if l.topics[0].hex() == '0x45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
                transfer_data['amount'] = web3.Web3.toInt(hexstr=l.data)
                # token address is right-aligned in the 32-byte topic word
                token_address_bytes = l.topics[2][32-20:]
                transfer_data['token_address'] = web3.Web3.toChecksumAddress(token_address_bytes.hex())
                transfer_data['from'] = rcpt.to

    if transfer_data != None:
        # tc is a (task_name, queue_name) pair from TASKS_TRANSFER_CALLBACKS
        for tc in transfer_callbacks:
            token_symbol = None
            try:
                logg.debug('checking token {}'.format(transfer_data['token_address']))
                token_symbol = get_token_symbol(w3, transfer_data['token_address'])
                token_decimals = get_token_decimals(w3, transfer_data['token_address'])
                logg.debug('calling transfer callback {}:{} for tx {}'.format(tc[1], tc[0], tx['hash']))
            except UnknownContractError:
                logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tc[1], tc[0], transfer_data['to'], tx.hash.hex()))
                continue
            # NOTE(review): the 'to' key is assumed to be populated by the
            # unpack_* helpers -- the giveto branch does not set it explicitly;
            # confirm against cic_eth.eth token unpackers
            result = {
                'hash': tx.hash.hex(),
                'sender': transfer_data['from'],
                'recipient': transfer_data['to'],
                'source_value': transfer_data['amount'],
                'destination_value': transfer_data['amount'],
                'source_token': transfer_data['token_address'],
                'destination_token': transfer_data['token_address'],
                'source_token_symbol': token_symbol,
                'destination_token_symbol': token_symbol,
                'source_token_decimals': token_decimals,
                'destination_token_decimals': token_decimals,
                'chain': str(chain_spec),
            }
            s = celery.signature(
                tc[0],
                [
                    result,
                    transfer_type,
                    # non-zero third argument signals a failed (reverted) tx
                    int(rcpt.status == 0),
                ],
                queue=tc[1],
            )
            s.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
class GasFilter:
    """Syncer filter that resumes gas-starved queued transactions when an
    incoming value transfer (gas refill) for a locally queued tx is observed.
    """

    def __init__(self, gas_provider):
        # account used as gas provider when resuming the waiting txs
        self.gas_provider = gas_provider

    def filter(self, w3, tx, rcpt, chain_str):
        """Check a mined tx; if it is a value transfer matching a locally
        queued tx, resume all WAITFORGAS txs for that recipient.

        :param w3: Web3 instance (unused, kept for filter signature parity)
        :param tx: transaction object
        :param rcpt: transaction receipt (unused, kept for signature parity)
        :param chain_str: chain spec string of the network
        """
        tx_hash_hex = tx.hash.hex()
        if tx['value'] > 0:
            logg.debug('gas refill tx {}'.format(tx_hash_hex))
            # look up the recipient of the queued tx matching this hash
            session = SessionBase.create_session()
            q = session.query(TxCache.recipient)
            q = q.join(Otx)
            q = q.filter(Otx.tx_hash==tx_hash_hex)
            r = q.first()

            session.close()

            if r == None:
                # a value transfer we did not initiate; nothing to resume
                logg.warning('unsolicited gas refill tx {}'.format(tx_hash_hex))
                return

            chain_spec = ChainSpec.from_chain_str(chain_str)
            # all txs for this recipient currently paused waiting for gas
            txs = get_paused_txs(StatusEnum.WAITFORGAS, r[0], chain_spec.chain_id())

            if len(txs) > 0:
                logg.info('resuming gas-in-waiting txs for {}: {}'.format(r[0], txs.keys()))
                s = create_check_gas_and_send_task(
                    list(txs.values()),
                    str(chain_str),
                    r[0],
                    0,
                    tx_hashes_hex=list(txs.keys()),
                    queue=queue,
                )
                s.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')

# Wrap the configured provider URL in the matching web3 provider class.
blockchain_provider = config.get('ETH_PROVIDER')
if re_websocket.match(blockchain_provider) is not None:
    blockchain_provider = WebsocketProvider(blockchain_provider)
elif re_http.match(blockchain_provider) is not None:
    blockchain_provider = HTTPProvider(blockchain_provider)
else:
    raise ValueError('unknown provider url {}'.format(blockchain_provider))


def web3_constructor():
    """Return the shared provider and a fresh Web3 instance bound to it."""
    w3_instance = web3.Web3(blockchain_provider)
    return (blockchain_provider, w3_instance)


RpcClient.set_constructor(web3_constructor)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: build the registry, choose syncers per --mode ('head' or
    'history'), attach the tx filters, and run each syncer loop until done.
    """
    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
    c = RpcClient(chain_spec)

    CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
    CICRegistry.add_path(config.get('ETH_ABI_DIR'))
    chain_registry = ChainRegistry(chain_spec)
    CICRegistry.add_chain_registry(chain_registry)

    if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
        CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)

    syncers = []
    block_offset = c.w3.eth.blockNumber
    chain = str(chain_spec)

    # first run on this chain: seed an initial history syncer from block 0
    if SyncerBackend.first(chain):
        from cic_eth.sync.history import HistorySyncer
        backend = SyncerBackend.initial(chain, block_offset)
        syncer = HistorySyncer(backend)
        syncers.append(syncer)

    if args.mode == 'head':
        from cic_eth.sync.head import HeadSyncer
        block_sync = SyncerBackend.live(chain, block_offset+1)
        syncers.append(HeadSyncer(block_sync))
    elif args.mode == 'history':
        from cic_eth.sync.history import HistorySyncer
        # resume every previously interrupted history sync
        backends = SyncerBackend.resume(chain, block_offset+1)
        for backend in backends:
            syncers.append(HistorySyncer(backend))
        if len(syncers) == 0:
            logg.info('found no unsynced history. terminating')
            sys.exit(0)
    else:
        sys.stderr.write("unknown mode '{}'\n".format(args.mode))
        sys.exit(1)

#    bancor_registry_contract = CICRegistry.get_contract(chain_spec, 'BancorRegistry', interface='Registry')
#    bancor_chain_registry = CICRegistry.get_chain_registry(chain_spec)
#    bancor_registry = BancorRegistryClient(c.w3, bancor_chain_registry, config.get('ETH_ABI_DIR'))
#    bancor_registry.load()

    i = 0
    for syncer in syncers:
        logg.debug('running syncer index {}'.format(i))
        gas_filter = GasFilter(c.gas_provider()).filter
        syncer.filter.append(gas_filter)
        syncer.filter.append(registration_filter)
        syncer.filter.append(callbacks_filter)
        # TODO: the two following filter functions break the filter loop if return uuid. Pro: less code executed. Con: Possibly unintuitive flow break
        syncer.filter.append(tx_filter)
        syncer.filter.append(convert_filter)

        try:
            # blocks until the syncer reaches its end condition
            syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')))
        except LoopDone as e:
            sys.stderr.write("sync '{}' done at block {}\n".format(args.mode, e))

        i += 1

    sys.exit(0)


if __name__ == '__main__':
    main()
|
214
apps/cic-eth/cic_eth/runnable/retry.py
Normal file
214
apps/cic-eth/cic_eth/runnable/retry.py
Normal file
@ -0,0 +1,214 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import argparse
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
import web3
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
from web3 import HTTPProvider, WebsocketProvider
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
from cic_eth.db import dsn_from_config
|
||||||
|
from cic_eth.db import SessionBase
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.sync.retry import RetrySyncer
|
||||||
|
from cic_eth.queue.tx import get_status_tx
|
||||||
|
from cic_eth.queue.tx import get_tx
|
||||||
|
from cic_eth.admin.ctrl import lock_send
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx_hex
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
# silence noisy third-party loggers
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)


config_dir = os.path.join('/usr/local/etc/cic-eth')

argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--retry-delay', dest='retry_delay', type=str, help='seconds to wait for retrying a transaction that is marked as sent')
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])


if args.v == True:
    logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)

config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# cli flags take precedence over config file values
args_override = {
    'ETH_ABI_DIR': getattr(args, 'abi_dir'),
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'CIC_TX_RETRY_DELAY': getattr(args, 'retry_delay'),
}
config.dict_override(args_override, 'cli flag')
# keep secrets out of the debug dump below
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))

app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))

# default celery queue for all tasks dispatched from this process
queue = args.q

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

dsn = dsn_from_config(config)
SessionBase.connect(dsn)


re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) != None:
    blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) != None:
    blockchain_provider = HTTPProvider(blockchain_provider)
else:
    raise ValueError('unknown provider url {}'.format(blockchain_provider))

def web3_constructor():
    # returns the shared provider and a fresh Web3 instance bound to it
    w3 = web3.Web3(blockchain_provider)
    return (blockchain_provider, w3)
RpcClient.set_constructor(web3_constructor)


# seconds after which a SENT tx is considered stuck and eligible for resend
straggler_delay = int(config.get('CIC_TX_RETRY_DELAY'))
|
||||||
|
|
||||||
|
# TODO: we already have the signed raw tx in get, so its a waste of cycles to get_tx here
|
||||||
|
def sendfail_filter(w3, tx_hash, rcpt, chain_str):
    """Retry-syncer filter: resubmit a failed-send transaction with a higher
    gas price, guarded by a queue-lock check for the sender address.

    :param w3: Web3 instance (unused, kept for filter signature parity)
    :param tx_hash: hash of the tx to retry
    :param rcpt: receipt (unused, kept for filter signature parity)
    :param chain_str: chain spec string of the network
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_dict = get_tx(tx_hash)
    # unpack only to learn the sender address for the lock check
    tx = unpack_signed_raw_tx_hex(tx_dict['signed_tx'], chain_spec.chain_id())
    logg.debug('submitting tx {} for retry'.format(tx_hash))
    s_check = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            tx_hash,
            chain_str,
            LockEnum.QUEUE,
            tx['from'],
        ],
        queue=queue,
    )
    s_resend = celery.signature(
        'cic_eth.eth.tx.resend_with_higher_gas',
        [
            chain_str,
        ],
        queue=queue,
    )
    # check_lock passes the tx through only if the sender is not locked
    # (removed the superseded commented-out resume_tx/set_ready chain)
    s_check.link(s_resend)
    s_check.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: can we merely use the dispatcher instead?
|
||||||
|
def dispatch(chain_str):
    """Requeue every locally stored tx currently in RETRY state: for each,
    run the queue-lock check and chain a send task behind it.

    :param chain_str: chain spec string of the network
    """
    txs = get_status_tx(StatusEnum.RETRY, datetime.datetime.utcnow())
    if len(txs) == 0:
        logg.debug('no retry state txs found')
        return

    for retry_tx_hash in txs.keys():
        raw_signed_tx = txs[retry_tx_hash]
        # unpack only to learn the sender address for the lock check
        tx = unpack_signed_raw_tx_hex(raw_signed_tx, chain_spec.chain_id())

        s_check = celery.signature(
            'cic_eth.admin.ctrl.check_lock',
            [[raw_signed_tx], chain_str, LockEnum.QUEUE, tx['from']],
            queue=queue,
        )
        s_send = celery.signature(
            'cic_eth.eth.tx.send',
            [chain_str],
            queue=queue,
        )
        s_check.link(s_send)
        t = s_check.apply_async()
|
||||||
|
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#def straggler_filter(w3, tx, rcpt, chain_str):
|
||||||
|
# before = datetime.datetime.utcnow() - datetime.timedelta(seconds=straggler_delay)
|
||||||
|
# txs = get_status_tx(StatusEnum.SENT, before)
|
||||||
|
# if len(txs) == 0:
|
||||||
|
# logg.debug('no straggler txs found')
|
||||||
|
# return
|
||||||
|
# txs = list(txs.keys())
|
||||||
|
# logg.debug('straggler txs {} chain {}'.format(signed_txs, chain_str))
|
||||||
|
# s_send = celery.signature(
|
||||||
|
# 'cic_eth.eth.resend_with_higher_gas',
|
||||||
|
# [
|
||||||
|
# txs,
|
||||||
|
# chain_str,
|
||||||
|
# ],
|
||||||
|
# queue=queue,
|
||||||
|
# )
|
||||||
|
# s_send.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: initialize the registry and run the retry syncer with the
    sendfail filter attached, dispatching RETRY-state txs between passes."""
    rpc_client = RpcClient(chain_spec)
    CICRegistry.init(rpc_client.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
    CICRegistry.add_path(config.get('ETH_ABI_DIR'))

    retry_syncer = RetrySyncer(chain_spec, straggler_delay, final_func=dispatch)
    retry_syncer.filter.append(sendfail_filter)
    retry_syncer.loop(float(straggler_delay))


if __name__ == '__main__':
    main()
|
173
apps/cic-eth/cic_eth/runnable/server_agent.py
Normal file
173
apps/cic-eth/cic_eth/runnable/server_agent.py
Normal file
@ -0,0 +1,173 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import logging
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import web3
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
from json.decoder import JSONDecodeError
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db import dsn_from_config
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-eth')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
# bugfix: args_override was built but dict_override was never called, so the
# -i/--chain-spec flag was silently ignored (sibling runnables all apply it)
config.dict_override(args_override, 'cli flag')
# keep secrets out of the debug dump below
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

dsn = dsn_from_config(config)
SessionBase.connect(dsn)

celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
# default celery queue for all tasks dispatched from this process
queue = args.q

# request path accepted by process_transfer_approval_request
re_transfer_approval_request = r'^/transferrequest/?'

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
|
||||||
|
|
||||||
|
def process_transfer_approval_request(session, env):
    """WSGI handler: accept a JSON POST to /transferrequest and submit a
    transfer approval request task, blocking until the signed tx is produced.

    :param session: database session (unused here, supplied by the dispatcher)
    :param env: WSGI environment dict
    :returns: (mime_type, body_bytes) tuple, or None if the path does not match
    :raises AttributeError: wrong content type or request method
    :raises RuntimeError: the celery task chain did not complete successfully
    """
    r = re.match(re_transfer_approval_request, env.get('PATH_INFO'))
    if not r:
        # not our path; let the dispatcher try the next handler
        return None

    if env.get('CONTENT_TYPE') != 'application/json':
        raise AttributeError('content type')

    if env.get('REQUEST_METHOD') != 'POST':
        raise AttributeError('method')

    post_data = json.load(env.get('wsgi.input'))
    token_address = web3.Web3.toChecksumAddress(post_data['token_address'])
    holder_address = web3.Web3.toChecksumAddress(post_data['holder_address'])
    beneficiary_address = web3.Web3.toChecksumAddress(post_data['beneficiary_address'])
    value = int(post_data['value'])

    logg.debug('transfer approval request token {} to {} from {} value {}'.format(
        token_address,
        beneficiary_address,
        holder_address,
        value,
        )
    )

    s = celery.signature(
        'cic_eth.eth.request.transfer_approval_request',
        [
            [
                {
                    'address': token_address,
                },
            ],
            holder_address,
            beneficiary_address,
            value,
            config.get('CIC_CHAIN_SPEC'),
        ],
        queue=queue,
    )
    t = s.apply_async()
    # blocking wait; first result element is the signed raw tx hex string
    r = t.get()
    tx_raw_bytes = bytes.fromhex(r[0][2:])
    tx = unpack_signed_raw_tx(tx_raw_bytes, chain_spec.chain_id())
    for r in t.collect():
        logg.debug('result {}'.format(r))

    if not t.successful():
        raise RuntimeError(tx['hash'])

    return ('text/plain', tx['hash'].encode('utf-8'),)
|
||||||
|
|
||||||
|
|
||||||
|
# uwsgi application
|
||||||
|
# uwsgi application
def application(env, start_response):
    """WSGI entry point: dispatch the request to each registered handler in
    turn, mapping handler exceptions to HTTP error statuses.

    :param env: WSGI environment dict
    :param start_response: WSGI response-start callable
    :returns: response body as a single-element list of bytes
    """
    for k in env.keys():
        logg.debug('env {} {}'.format(k, env[k]))

    headers = []
    content = b''
    err = None

    session = SessionBase.create_session()
    for handler in [
            process_transfer_approval_request,
            ]:
        try:
            r = handler(session, env)
        except AttributeError as e:
            logg.error('handler fail attribute {}'.format(e))
            err = '400 Impertinent request'
            break
        except JSONDecodeError as e:
            logg.error('handler fail json {}'.format(e))
            err = '400 Invalid data format'
            break
        except KeyError as e:
            logg.error('handler fail key {}'.format(e))
            err = '400 Invalid JSON'
            break
        except ValueError as e:
            logg.error('handler fail value {}'.format(e))
            err = '400 Invalid data'
            break
        except RuntimeError as e:
            logg.error('task fail value {}'.format(e))
            err = '500 Task failed, sorry I cannot tell you more'
            break
        # a handler returning non-None has produced the response
        if r != None:
            (mime_type, content) = r
            break
    session.close()

    if err != None:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response(err, headers)
        # NOTE(review): session is closed a second time here; appears
        # harmless but redundant -- confirm and consider removing
        session.close()
        return [content]

    headers.append(('Content-Length', str(len(content))),)
    headers.append(('Access-Control-Allow-Origin', '*',));

    if len(content) == 0:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response('404 Looked everywhere, sorry', headers)
    else:
        headers.append(('Content-Type', mime_type,))
        start_response('200 OK', headers)

    return [content]
|
77
apps/cic-eth/cic_eth/runnable/tag.py
Normal file
77
apps/cic-eth/cic_eth/runnable/tag.py
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import argparse
|
||||||
|
import re
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import web3
|
||||||
|
from web3 import HTTPProvider, WebsocketProvider
|
||||||
|
import confini
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.api import AdminApi
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
from cic_eth.db import dsn_from_config
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')


argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-p', '--provider', dest='p', type=str, help='Web3 provider url (http only)')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
argparser.add_argument('tag', type=str, help='address tag')
argparser.add_argument('address', type=str, help='address')
args = argparser.parse_args(sys.argv[1:])

if args.v == True:
    logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)

config = confini.Config(args.c)
config.process()
# cli flags take precedence over config file values
args_override = {
    'ETH_PROVIDER': getattr(args, 'p'),
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.dict_override(args_override, 'cli flag')
# keep secrets out of the debug dump below
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}\n{}'.format(args.c, config))


dsn = dsn_from_config(config)
SessionBase.connect(dsn)

re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) != None:
    blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) != None:
    blockchain_provider = HTTPProvider(blockchain_provider)
else:
    raise ValueError('unknown provider url {}'.format(blockchain_provider))

def web3_constructor():
    # returns the shared provider and a fresh Web3 instance bound to it
    w3 = web3.Web3(blockchain_provider)
    return (blockchain_provider, w3)
RpcClient.set_constructor(web3_constructor)
# NOTE(review): sibling runnables build a ChainSpec object before handing it
# to RpcClient; here the raw chain spec string is passed -- confirm RpcClient
# accepts a string
c = RpcClient(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
|
||||||
|
def main():
    """Entry point: tag the given address via the admin API."""
    admin_api = AdminApi(c)
    admin_api.tag_account(args.tag, args.address)


if __name__ == '__main__':
    main()
|
238
apps/cic-eth/cic_eth/runnable/tasker.py
Normal file
238
apps/cic-eth/cic_eth/runnable/tasker.py
Normal file
@ -0,0 +1,238 @@
|
|||||||
|
# standard imports
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import argparse
|
||||||
|
import tempfile
|
||||||
|
import re
|
||||||
|
import urllib
|
||||||
|
import websocket
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
import confini
|
||||||
|
from crypto_dev_signer.eth.web3ext import Web3 as Web3Ext
|
||||||
|
from web3 import HTTPProvider, WebsocketProvider
|
||||||
|
from gas_proxy.web3 import GasMiddleware
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_registry.registry import CICRegistry
|
||||||
|
from cic_registry.registry import ChainRegistry
|
||||||
|
from cic_registry.registry import ChainSpec
|
||||||
|
from cic_registry.helper.declarator import DeclaratorOracleAdapter
|
||||||
|
|
||||||
|
from cic_bancor.bancor import BancorRegistryClient
|
||||||
|
from cic_eth.eth import bancor
|
||||||
|
from cic_eth.eth import token
|
||||||
|
from cic_eth.eth import tx
|
||||||
|
from cic_eth.eth import account
|
||||||
|
from cic_eth.eth import request
|
||||||
|
from cic_eth.admin import debug
|
||||||
|
from cic_eth.admin import ctrl
|
||||||
|
from cic_eth.eth.rpc import RpcClient
|
||||||
|
from cic_eth.eth.rpc import GasOracle
|
||||||
|
from cic_eth.queue import tx
|
||||||
|
from cic_eth.callbacks import Callback
|
||||||
|
from cic_eth.callbacks import http
|
||||||
|
from cic_eth.callbacks import tcp
|
||||||
|
from cic_eth.callbacks import redis
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
from cic_eth.db.models.otx import Otx
|
||||||
|
from cic_eth.db import dsn_from_config
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser()
|
||||||
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='web3 provider')
|
||||||
|
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
|
||||||
|
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
|
||||||
|
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||||
|
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||||
|
args = argparser.parse_args()
|
||||||
|
|
||||||
|
if args.vv:
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
elif args.v:
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
|
||||||
|
config = confini.Config(args.c, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
# override args
|
||||||
|
args_override = {
|
||||||
|
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
|
'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
|
||||||
|
}
|
||||||
|
config.dict_override(args_override, 'cli flag')
|
||||||
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
|
||||||
|
|
||||||
|
# connect to database
|
||||||
|
dsn = dsn_from_config(config)
|
||||||
|
SessionBase.connect(dsn)
|
||||||
|
|
||||||
|
# verify database connection with minimal sanity query
|
||||||
|
session = SessionBase.create_session()
|
||||||
|
session.execute('select version_num from alembic_version')
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
# set up celery
|
||||||
|
current_app = celery.Celery(__name__)
|
||||||
|
|
||||||
|
broker = config.get('CELERY_BROKER_URL')
|
||||||
|
if broker[:4] == 'file':
|
||||||
|
bq = tempfile.mkdtemp()
|
||||||
|
bp = tempfile.mkdtemp()
|
||||||
|
current_app.conf.update({
|
||||||
|
'broker_url': broker,
|
||||||
|
'broker_transport_options': {
|
||||||
|
'data_folder_in': bq,
|
||||||
|
'data_folder_out': bq,
|
||||||
|
'data_folder_processed': bp,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
|
||||||
|
else:
|
||||||
|
current_app.conf.update({
|
||||||
|
'broker_url': broker,
|
||||||
|
})
|
||||||
|
|
||||||
|
result = config.get('CELERY_RESULT_URL')
|
||||||
|
if result[:4] == 'file':
|
||||||
|
rq = tempfile.mkdtemp()
|
||||||
|
current_app.conf.update({
|
||||||
|
'result_backend': 'file://{}'.format(rq),
|
||||||
|
})
|
||||||
|
logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
|
||||||
|
else:
|
||||||
|
current_app.conf.update({
|
||||||
|
'result_backend': result,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
# set up web3
|
||||||
|
# TODO: web3 socket wrapping is now a lot of code. factor out
|
||||||
|
class JSONRPCHttpSocketAdapter:
    """Socket-like adapter that tunnels JSONRPC payloads over HTTP POST.

    Presents the minimal send/recv interface expected by socket consumers
    (e.g. GasMiddleware), buffering the most recent HTTP response body so it
    can be returned by recv.

    :param url: HTTP(S) JSONRPC endpoint
    :type url: str
    """

    def __init__(self, url):
        self.response = None
        self.url = url

    def send(self, data):
        """POST a JSONRPC payload to the endpoint and buffer the response.

        :param data: JSONRPC request payload
        :type data: str
        """
        # The module top only does "import urllib", which does not reliably
        # bind the urllib.request submodule; import it explicitly here.
        import urllib.request
        logg.debug('redirecting socket send to jsonrpc http socket adapter {} {}'.format(self.url, data))
        req = urllib.request.Request(self.url, method='POST')
        req.add_header('Content-type', 'application/json')
        req.add_header('Connection', 'close')
        # Close the HTTP response promptly instead of leaking the connection.
        with urllib.request.urlopen(req, data=data.encode('utf-8')) as res:
            self.response = res.read().decode('utf-8')
        logg.debug('setting jsonrpc http socket adapter response to {}'.format(self.response))

    def recv(self, n=0):
        """Return the buffered response from the last send; n is ignored."""
        return self.response
|
||||||
|
|
||||||
|
|
||||||
|
re_websocket = re.compile('^wss?://')
|
||||||
|
re_http = re.compile('^https?://')
|
||||||
|
blockchain_provider = config.get('ETH_PROVIDER')
|
||||||
|
socket_constructor = None
|
||||||
|
if re.match(re_websocket, blockchain_provider) != None:
|
||||||
|
def socket_constructor_ws():
|
||||||
|
return websocket.create_connection(config.get('ETH_PROVIDER'))
|
||||||
|
socket_constructor = socket_constructor_ws
|
||||||
|
blockchain_provider = WebsocketProvider(blockchain_provider)
|
||||||
|
elif re.match(re_http, blockchain_provider) != None:
|
||||||
|
def socket_constructor_http():
|
||||||
|
return JSONRPCHttpSocketAdapter(config.get('ETH_PROVIDER'))
|
||||||
|
socket_constructor = socket_constructor_http
|
||||||
|
blockchain_provider = HTTPProvider(blockchain_provider)
|
||||||
|
else:
|
||||||
|
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
||||||
|
|
||||||
|
|
||||||
|
def web3ext_constructor():
    """Construct a signer-enabled web3 instance for RpcClient.

    Returns a (provider, web3) pair where the web3 object routes transaction
    signing through the external signer socket and gas pricing through
    GasMiddleware.
    """
    w3 = Web3Ext(blockchain_provider, config.get('SIGNER_SOCKET_PATH'))
    # GasMiddleware opens its own socket to the provider; reuse the socket
    # constructor selected above for the provider url scheme (ws or http).
    GasMiddleware.socket_constructor = socket_constructor
    w3.middleware_onion.add(GasMiddleware)

    def sign_transaction(tx):
        # Delegate signing to the external signer, then flatten the result
        # into plain dicts / hex strings so it can be serialized downstream.
        r = w3.eth.signTransaction(tx)
        d = r.__dict__
        for k in d.keys():
            if k == 'tx':
                d[k] = d[k].__dict__
            else:
                d[k] = d[k].hex()
        return d

    # Expose snake_case aliases alongside the camelCase web3 methods.
    setattr(w3.eth, 'sign_transaction', sign_transaction)
    setattr(w3.eth, 'send_raw_transaction', w3.eth.sendRawTransaction)
    return (blockchain_provider, w3)
|
||||||
|
RpcClient.set_constructor(web3ext_constructor)
|
||||||
|
|
||||||
|
logg.info('ccc {}'.format(config.store['TASKS_TRACE_QUEUE_STATUS']))
|
||||||
|
Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: configure registries and callbacks, then run the celery worker."""
    # Build the celery worker argv, propagating verbosity from the cli flags.
    argv = ['worker']
    if args.vv:
        argv.append('--loglevel=DEBUG')
    elif args.v:
        argv.append('--loglevel=INFO')
    argv.append('-Q')
    argv.append(args.q)
    argv.append('-n')
    argv.append(args.q)

    # Callback tasks use client-side SSL when enabled in config.
    if config.true('SSL_ENABLE_CLIENT'):
        Callback.ssl = True
        Callback.ssl_cert_file = config.get('SSL_CERT_FILE')
        Callback.ssl_key_file = config.get('SSL_KEY_FILE')
        Callback.ssl_password = config.get('SSL_PASSWORD')

    if config.get('SSL_CA_FILE') != '':
        Callback.ssl_ca_file = config.get('SSL_CA_FILE')

    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

    c = RpcClient(chain_spec)
    CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
    CICRegistry.add_path(config.get('ETH_ABI_DIR'))

    chain_registry = ChainRegistry(chain_spec)
    CICRegistry.add_chain_registry(chain_registry, True)

    if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
        CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)

    # Optionally register a declarator oracle for trusted-address lookups.
    if config.get('CIC_DECLARATOR_ADDRESS') != None:
        import json
        declarator_address = config.get('CIC_DECLARATOR_ADDRESS')
        # NOTE(review): the original referenced an undefined name `interface`
        # here; 'AddressDeclarator' is assumed to be the abi file name —
        # TODO confirm against the contents of ETH_ABI_DIR.
        abi_path = os.path.join(config.get('ETH_ABI_DIR'), '{}.json'.format('AddressDeclarator'))
        # was json.load(abi_path): json.load takes a file object, not a path
        with open(abi_path) as f:
            abi = json.load(f)
        # bind to a new name instead of clobbering the RpcClient in `c`,
        # and use c.w3 (the original referenced an undefined `w3`/`address`)
        declarator_contract = c.w3.eth.contract(abi=abi, address=declarator_address)
        # default must be a string: the original list default has no split()
        trusted_addresses = config.get('CIC_TRUSTED_ADDRESSES', '').split(',')
        oracle = DeclaratorOracleAdapter(declarator_contract, trusted_addresses)
        chain_registry.add_oracle(oracle)

    #chain_spec = CICRegistry.default_chain_spec
    #bancor_registry_contract = CICRegistry.get_contract(chain_spec, 'BancorRegistry', interface='Registry')
    #bancor_chain_registry = CICRegistry.get_chain_registry(chain_spec)
    #bancor_registry = BancorRegistryClient(c.w3, bancor_chain_registry, config.get('ETH_ABI_DIR'))
    #bancor_registry.load(True)
    current_app.worker_main(argv)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
177
apps/cic-eth/cic_eth/runnable/view.py
Normal file
177
apps/cic-eth/cic_eth/runnable/view.py
Normal file
@ -0,0 +1,177 @@
|
|||||||
|
#!python3
|
||||||
|
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
import web3
|
||||||
|
from cic_registry import CICRegistry
|
||||||
|
from cic_registry.chain import ChainSpec
|
||||||
|
from cic_registry.chain import ChainRegistry
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.api import AdminApi
|
||||||
|
from cic_eth.eth.rpc import RpcClient
|
||||||
|
from cic_eth.db.enum import StatusEnum
|
||||||
|
from cic_eth.db.enum import LockEnum
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
logging.getLogger('web3').setLevel(logging.WARNING)
|
||||||
|
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||||
|
|
||||||
|
|
||||||
|
default_abi_dir = '/usr/share/local/cic/solidity/abi'
|
||||||
|
default_config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser()
|
||||||
|
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||||
|
argparser.add_argument('-r', '--registry-address', type=str, help='CIC registry address')
|
||||||
|
argparser.add_argument('-f', '--format', dest='f', default='terminal', type=str, help='Output format')
|
||||||
|
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||||
|
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||||
|
argparser.add_argument('query', type=str, help='Transaction, transaction hash, account or "lock"')
|
||||||
|
args = argparser.parse_args()
|
||||||
|
|
||||||
|
# Check -vv before -v so the most verbose flag wins when both are given;
# the original checked -v first, yielding INFO for `-v -vv`. This also
# matches the flag handling in tasker.py.
if args.vv == True:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v == True:
    logging.getLogger().setLevel(logging.INFO)
|
||||||
|
|
||||||
|
|
||||||
|
config_dir = os.path.join(args.c)
|
||||||
|
os.makedirs(config_dir, 0o777, True)
|
||||||
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
# override config values with cli flags
args_override = {
    'ETH_PROVIDER': getattr(args, 'p'),
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
# apply the overrides; the dict was previously built but never used,
# so -p / -i had no effect (tasker.py applies the same call)
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
|
re_websocket = re.compile('^wss?://')
|
||||||
|
re_http = re.compile('^https?://')
|
||||||
|
blockchain_provider = config.get('ETH_PROVIDER')
|
||||||
|
if re.match(re_websocket, blockchain_provider) != None:
|
||||||
|
blockchain_provider = web3.Web3.WebsocketProvider(blockchain_provider)
|
||||||
|
elif re.match(re_http, blockchain_provider) != None:
|
||||||
|
blockchain_provider = web3.Web3.HTTPProvider(blockchain_provider)
|
||||||
|
else:
|
||||||
|
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
||||||
|
|
||||||
|
def web3_constructor():
|
||||||
|
w3 = web3.Web3(blockchain_provider)
|
||||||
|
return (blockchain_provider, w3)
|
||||||
|
RpcClient.set_constructor(web3_constructor)
|
||||||
|
|
||||||
|
|
||||||
|
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
|
|
||||||
|
queue = args.q
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
chain_str = str(chain_spec)
|
||||||
|
c = RpcClient(chain_spec)
|
||||||
|
admin_api = AdminApi(c)
|
||||||
|
|
||||||
|
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
||||||
|
chain_registry = ChainRegistry(chain_spec)
|
||||||
|
CICRegistry.add_chain_registry(chain_registry)
|
||||||
|
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
||||||
|
CICRegistry.load_for(chain_spec)
|
||||||
|
|
||||||
|
fmt = 'terminal'
|
||||||
|
if args.f[:1] == 'j':
|
||||||
|
fmt = 'json'
|
||||||
|
elif args.f[:1] != 't':
|
||||||
|
raise ValueError('unknown output format {}'.format(args.f))
|
||||||
|
|
||||||
|
|
||||||
|
def render_tx(o, **kwargs):
    """Render a transaction dict as human-readable terminal output.

    Every field except the status log is printed as a key/value line,
    followed by the decoded status history.
    """
    lines = ['{}: {}\n'.format(field, o[field]) for field in o.keys() if field != 'status_log']
    lines.append('status log:\n')

    for entry in o.get('status_log', []):
        when = datetime.datetime.fromisoformat(entry[0])
        status_name = StatusEnum(entry[1]).name
        lines.append('{}: {}\n'.format(when, status_name))

    return ''.join(lines)
|
||||||
|
|
||||||
|
def render_account(o, **kwargs):
    """Render a single account transaction entry as a one-line summary."""
    fields = (o['date_updated'], o['nonce'], o['tx_hash'], o['status'])
    return '{} {} {} {}'.format(*fields)
|
||||||
|
|
||||||
|
|
||||||
|
def render_lock(o, **kwargs):
    """Render an active lock entry as a one-line summary.

    The lock bitmask is decoded into LockEnum names; the transaction hash is
    appended when the lock is tied to a specific transaction.
    """
    flags = o['flags']
    names = [LockEnum(1 << bit).name for bit in range(31) if flags & (1 << bit)]

    rendered = '{} {} {}'.format(o['address'], o['date'], ','.join(names))
    if o['tx_hash'] is not None:
        rendered = '{} {}'.format(rendered, o['tx_hash'])

    return rendered
|
||||||
|
|
||||||
|
# TODO: move each command to submodule
def main():
    """Entry point: classify the query argument and print matching records.

    The query is dispatched by shape: longer than 66 chars is treated as raw
    signed transaction data, longer than 42 as a transaction hash, exactly 42
    as an account address, and the literal prefix 'lock' lists active locks.
    """
    logg.debug('len {}'.format(len(args.query)))
    txs = []
    renderer = render_tx
    if len(args.query) > 66:
        # longer than a 0x-prefixed 32-byte hash: raw transaction data
        txs = [admin_api.tx(chain_spec, tx_raw=args.query)]
    elif len(args.query) > 42:
        # 0x-prefixed 32-byte transaction hash
        txs = [admin_api.tx(chain_spec, tx_hash=args.query)]
    elif len(args.query) == 42:
        # 0x-prefixed 20-byte account address
        txs = admin_api.account(chain_spec, args.query, include_recipient=False)
        renderer = render_account
    elif len(args.query) >= 4 and args.query[:4] == 'lock':
        txs = admin_api.get_lock()
        renderer = render_lock
    else:
        raise ValueError('cannot parse argument {}'.format(args.query))

    if len(txs) == 0:
        logg.info('no matches found')
    else:
        if fmt == 'json':
            sys.stdout.write(json.dumps(txs))
        else:
            # render each record with the renderer selected above
            m = map(renderer, txs)
            print(*m, sep="\n")


if __name__ == '__main__':
    main()
|
1
apps/cic-eth/cic_eth/sync/__init__.py
Normal file
1
apps/cic-eth/cic_eth/sync/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
from .base import Syncer
|
196
apps/cic-eth/cic_eth/sync/backend.py
Normal file
196
apps/cic-eth/cic_eth/sync/backend.py
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_eth.db.models.sync import BlockchainSync
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class SyncerBackend:
    """Interface to block and transaction sync state.

    :param chain_spec: Chain spec for the chain that syncer is running for.
    :type chain_spec: cic_registry.chain.ChainSpec
    :param object_id: Unique id for the syncer session.
    :type object_id: number
    """
    def __init__(self, chain_spec, object_id):
        self.db_session = None
        self.db_object = None
        self.chain_spec = chain_spec
        self.object_id = object_id
        # Eagerly connect/disconnect once so a missing sync record fails fast
        # at construction time rather than on first use.
        self.connect()
        self.disconnect()


    def connect(self):
        """Loads the state of the syncer session with the given id.
        """
        # Each connect opens a fresh session; disconnect() commits and closes it.
        self.db_session = SessionBase.create_session()
        q = self.db_session.query(BlockchainSync)
        q = q.filter(BlockchainSync.id==self.object_id)
        self.db_object = q.first()
        if self.db_object == None:
            raise ValueError('sync entry with id {} not found'.format(self.object_id))


    def disconnect(self):
        """Commits state of sync to backend.
        """
        self.db_session.add(self.db_object)
        self.db_session.commit()
        self.db_session.close()


    def chain(self):
        """Returns chain spec for syncer

        :returns: Chain spec
        :rtype chain_spec: cic_registry.chain.ChainSpec
        """
        return self.chain_spec


    def get(self):
        """Get the current state of the syncer cursor.

        :returns: Block and block transaction height, respectively
        :rtype: tuple
        """
        self.connect()
        pair = self.db_object.cursor()
        self.disconnect()
        return pair


    def set(self, block_height, tx_height):
        """Update the state of the syncer cursor
        :param block_height: Block height of cursor
        :type block_height: number
        :param tx_height: Block transaction height of cursor
        :type tx_height: number
        :returns: Block and block transaction height, respectively
        :rtype: tuple
        """
        self.connect()
        pair = self.db_object.set(block_height, tx_height)
        self.disconnect()
        return pair


    def start(self):
        """Get the initial state of the syncer cursor.

        :returns: Initial block and block transaction height, respectively
        :rtype: tuple
        """
        self.connect()
        pair = self.db_object.start()
        self.disconnect()
        return pair


    def target(self):
        """Get the target state (upper bound of sync) of the syncer cursor.

        :returns: Target block height
        :rtype: number
        """
        self.connect()
        target = self.db_object.target()
        self.disconnect()
        return target


    @staticmethod
    def first(chain):
        """Returns the model object of the most recent syncer in backend.

        :param chain: Chain spec of chain that syncer is running for.
        :type chain: cic_registry.chain.ChainSpec
        :returns: Last syncer object
        :rtype: cic_eth.db.models.BlockchainSync
        """
        return BlockchainSync.first(chain)


    @staticmethod
    def initial(chain, block_height):
        """Creates a new syncer session and commit its initial state to backend.

        :param chain: Chain spec of chain that syncer is running for.
        :type chain: cic_registry.chain.ChainSpec
        :param block_height: Target block height
        :type block_height: number
        :returns: New syncer object
        :rtype: cic_eth.db.models.BlockchainSync
        """
        object_id = None
        session = SessionBase.create_session()
        # start at 0:0 and sync up to the given target height
        o = BlockchainSync(chain, 0, 0, block_height)
        session.add(o)
        session.commit()
        object_id = o.id
        session.close()

        return SyncerBackend(chain, object_id)


    @staticmethod
    def resume(chain, block_height):
        """Retrieves and returns all previously unfinished syncer sessions.


        :param chain: Chain spec of chain that syncer is running for.
        :type chain: cic_registry.chain.ChainSpec
        :param block_height: Target block height
        :type block_height: number
        :returns: Syncer objects of unfinished syncs
        :rtype: list of cic_eth.db.models.BlockchainSync
        """
        syncers = []

        session = SessionBase.create_session()

        object_id = None

        # re-attach every sync session that did not reach its target
        for object_id in BlockchainSync.get_unsynced(session=session):
            logg.debug('block syncer resume added previously unsynced sync entry id {}'.format(object_id))
            syncers.append(SyncerBackend(chain, object_id))

        # additionally cover the gap between the last live sync position and
        # the current chain head with a new bounded session
        (block_resume, tx_resume) = BlockchainSync.get_last_live_height(block_height, session=session)
        if block_height != block_resume:
            o = BlockchainSync(chain, block_resume, tx_resume, block_height)
            session.add(o)
            session.commit()
            object_id = o.id
            syncers.append(SyncerBackend(chain, object_id))
            logg.debug('block syncer resume added new sync entry from previous run id {}, start{}:{} target {}'.format(object_id, block_resume, tx_resume, block_height))

        session.close()

        return syncers


    @staticmethod
    def live(chain, block_height):
        """Creates a new open-ended syncer session starting at the given block height.

        :param chain: Chain spec of chain that syncer is running for.
        :type chain: cic_registry.chain.ChainSpec
        :param block_height: Target block height
        :type block_height: number
        :returns: "Live" syncer object
        :rtype: cic_eth.db.models.BlockchainSync
        """
        object_id = None
        session = SessionBase.create_session()
        # target None marks the session as open-ended (follows the head)
        o = BlockchainSync(chain, block_height, 0, None)
        session.add(o)
        session.commit()
        object_id = o.id
        session.close()

        return SyncerBackend(chain, object_id)
|
51
apps/cic-eth/cic_eth/sync/base.py
Normal file
51
apps/cic-eth/cic_eth/sync/base.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# TODO: extend blocksync model
class Syncer:
    """Abstract block sync poller.

    Subclasses implement get/process/loop to fetch and handle new blocks.

    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
    :type bc_cache: cic_eth.sync.SyncerBackend
    """
    # shared web3 handle, assigned externally before use
    w3 = None
    # global kill-switch shared by all syncer instances
    running_global = True

    def __init__(self, bc_cache):
        self.cursor = None
        self.bc_cache = bc_cache
        self.filter = []
        self.running = True


    def chain(self):
        """Return the chain spec representation for the chain this syncer runs on.

        :returns: Chain spec string
        :rtype: str
        """
        return self.bc_cache.chain()


    def get(self):
        """Fetch the latest unprocessed blocks; implemented by subclasses.

        :returns: list of block hash strings
        :rtype: list
        """
        raise NotImplementedError()


    def process(self, w3, ref):
        """Process transactions in a single block; implemented by subclasses.

        :param ref: Reference of object to process
        :type ref: str, 0x-hex
        """
        raise NotImplementedError()


    def loop(self, interval):
        """Run the syncer loop; implemented by subclasses.

        :param interval: Delay in seconds until next attempt if no new blocks are found.
        :type interval: int
        """
        raise NotImplementedError()
|
4
apps/cic-eth/cic_eth/sync/error.py
Normal file
4
apps/cic-eth/cic_eth/sync/error.py
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
class LoopDone(Exception):
    """Raised when a syncer has finished processing its assigned block range."""
    pass
|
51
apps/cic-eth/cic_eth/sync/head.py
Normal file
51
apps/cic-eth/cic_eth/sync/head.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import web3
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .mined import MinedSyncer
|
||||||
|
from .base import Syncer
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class HeadSyncer(MinedSyncer):
    """Implements the get method in Syncer for retrieving every new mined block.

    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
    :type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
    """
    def __init__(self, bc_cache):
        super(HeadSyncer, self).__init__(bc_cache)
        # TODO: filter not returning all blocks, at least with ganache. kind of defeats the point, then
        #self.w3_filter = rpc.w3.eth.filter({
        #    'fromBlock': block_offset,
        #    }) #'latest')
        #self.bc_cache.set(block_offset, 0)
        logg.debug('initialized head syncer with offset {}'.format(bc_cache.start()))


    def get(self, w3):
        """Implements Syncer.get

        :param w3: Web3 object
        :type w3: web3.Web3
        :returns: Block hash of newly mined blocks. if any
        :rtype: list of str, 0x-hex
        """
        # fix: this docstring was previously a bare string expression placed
        # *before* the def, which python discards; moved inside the method so
        # it is the actual method docstring.
        # Of course, the filter doesn't return the same block dict format as getBlock() so we'll just waste some cycles getting the hashes instead.
        #hashes = []
        #for block in self.w3_filter.get_new_entries():
        #    hashes.append(block['blockHash'])
        #logg.debug('blocks {}'.format(hashes))
        #return hashes
        (block_number, tx_number) = self.bc_cache.get()
        block_hash = []
        try:
            # Poll the block at the current cursor height; BlockNotFound means
            # the chain head has not yet advanced past the cursor.
            block = w3.eth.getBlock(block_number)
            block_hash.append(block.hash)
        except web3.exceptions.BlockNotFound:
            pass

        return block_hash
|
74
apps/cic-eth/cic_eth/sync/history.py
Normal file
74
apps/cic-eth/cic_eth/sync/history.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
from web3.exceptions import BlockNotFound
|
||||||
|
from .error import LoopDone
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .mined import MinedSyncer
|
||||||
|
from .base import Syncer
|
||||||
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class HistorySyncer(MinedSyncer):
    """Implements the get method in Syncer for retrieving all blocks between last processed block before previous shutdown and block height at time of syncer start.

    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
    :type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
    :param mx: Maximum number of blocks to return in one call
    :type mx: int
    """
    def __init__(self, bc_cache, mx=20):
        super(HistorySyncer, self).__init__(bc_cache)
        self.max = mx

        self.target = bc_cache.target()
        logg.info('History syncer target block number {}'.format(self.target))

        session_offset = self.bc_cache.get()

        self.block_offset = session_offset[0]
        self.tx_offset = session_offset[1]
        logg.info('History syncer starting at {}:{}'.format(session_offset[0], session_offset[1]))

        self.filter = []


    def get(self, w3):
        """Implements Syncer.get

        BUG: Should also raise LoopDone when block array is empty after loop.

        :param w3: Web3 object
        :type w3: web3.Web3
        :raises LoopDone: If a block is not found.
        :return: Return a batch of blocks to process
        :rtype: list of str, 0x-hex
        """
        # fix: this docstring was previously a bare string expression placed
        # *before* the def, which python discards; moved inside the method.
        sync_db = self.bc_cache
        height = self.bc_cache.get()
        logg.debug('height {}'.format(height))
        block_last = height[0]
        tx_last = height[1]
        if not self.running:
            raise LoopDone((block_last, tx_last))
        b = []
        # cap the batch at self.max blocks, never past the sync target
        block_target = block_last + self.max
        if block_target > self.target:
            block_target = self.target
        logg.debug('target {} last {} max {}'.format(block_target, block_last, self.max))
        for i in range(block_last, block_target):
            if i == self.target:
                logg.info('reached target {}, exiting'.format(i))
                self.running = False
                break
            bhash = w3.eth.getBlock(i).hash
            b.append(bhash)
            logg.debug('appending block {} {}'.format(i, bhash.hex()))
        if block_last == block_target:
            logg.info('aleady reached target {}, exiting'.format(self.target))
            self.running = False
        return b
|
50
apps/cic-eth/cic_eth/sync/mempool.py
Normal file
50
apps/cic-eth/cic_eth/sync/mempool.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
# NOTE(review): this module has no imports, so the base name `Syncer` is
# unresolved here — importing this file raises NameError at class-creation
# time, before the explicit NotImplementedError is ever reached. Presumably
# it needs `from .base import Syncer` like head.py; TODO confirm whether the
# module is meant to be importable (it is omitted from coverage).
class MemPoolSyncer(Syncer):
    """Unfinished syncer stub for watching the node mempool."""

    def __init__(self, bc_cache):
        # Construction is deliberately blocked: the sketched implementation
        # below is incomplete (needs web3 tx to raw transaction conversion).
        raise NotImplementedError('incomplete, needs web3 tx to raw transaction conversion')
        super(MemPoolSyncer, self).__init__(bc_cache)
#        self.w3_filter = Syncer.w3.eth.filter('pending')
#        for tx in tx_cache.txs:
#            self.txs.append(tx)
#            logg.debug('add tx {} to mempoolsyncer'.format(tx))
#
#
#    def get(self):
#        return self.w3_filter.get_new_entries()
#
#
#    def process(self, tx_hash):
#        tx_hash_hex = tx_hash.hex()
#        if tx_hash_hex in self.txs:
#            logg.debug('syncer already watching {}, skipping'.format(tx_hash_hex))
#        tx = self.w3.eth.getTransaction(tx_hash_hex)
#        serialized_tx = rlp.encode({
#            'nonce': tx.nonce,
#            'from': getattr(tx, 'from'),
#            })
#        logg.info('add {} to syncer: {}'.format(tx, serialized_tx))
#        otx = Otx(
#            nonce=tx.nonce,
#            address=getattr(tx, 'from'),
#            tx_hash=tx_hash_hex,
#            signed_tx=serialized_tx,
#            )
#        Otx.session.add(otx)
#        Otx.session.commit()
#
#
#    def loop(self, interval):
#        while Syncer.running:
#            logg.debug('loop execute')
#            txs = self.get()
#            logg.debug('got txs {}'.format(txs))
#            for tx in txs:
#                #block_number = self.process(block.hex())
#                self.process(tx)
#                #if block_number > self.bc_cache.head():
#                #    self.bc_cache.head(block_number)
#            time.sleep(interval)
#        logg.info("Syncer no longer set to run, gracefully exiting")
|
||||||
|
|
||||||
|
|
103
apps/cic-eth/cic_eth/sync/mined.py
Normal file
103
apps/cic-eth/cic_eth/sync/mined.py
Normal file
@ -0,0 +1,103 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import Syncer
|
||||||
|
from cic_eth.queue.tx import set_final_status
|
||||||
|
from cic_eth.eth import RpcClient
|
||||||
|
|
||||||
|
app = celery.current_app
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
class MinedSyncer(Syncer):
    """Base implementation of block processor for mined blocks.

    Loops through all transactions in each retrieved block, passing each
    transaction (with its receipt) through the registered filters.

    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
    :type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
    """

    def __init__(self, bc_cache):
        super(MinedSyncer, self).__init__(bc_cache)
        # block/tx cursor of the last processed position; process() uses
        # these to resume mid-block after a restart.
        self.block_offset = 0
        self.tx_offset = 0


    def process(self, w3, ref):
        """Processes transactions in a single block, advancing transaction (and block) cursor accordingly.

        :param w3: Web3 object
        :type w3: web3.Web3
        :param ref: Block reference (hash) to process
        :type ref: str, 0x-hex
        :returns: Block number of the block that was processed
        :rtype: number
        """
        b = w3.eth.getBlock(ref)
        c = w3.eth.getBlockTransactionCount(ref)
        start = 0
        # resume at the stored tx offset if we stopped mid-way through this block
        if self.block_offset == b.number:
            start = self.tx_offset

        logg.debug('processing {} (blocknumber {}, count {}, offset {})'.format(ref, b.number, c, start))

        for i in range(start, c):
            tx = w3.eth.getTransactionByBlock(ref, i)
            tx_hash_hex = tx['hash'].hex()
            rcpt = w3.eth.getTransactionReceipt(tx_hash_hex)
            logg.debug('{}/{} processing tx {} from block {} {}'.format(i+1, c, tx_hash_hex, b.number, ref))
            # TODO: ensure filter loop can complete on graceful shutdown
            # NOTE(review): a raising filter aborts the whole block; consider
            # catching per-filter errors here.
            for f in self.filter:
                task_uuid = f(w3, tx, rcpt, self.chain())
                if task_uuid is not None:
                    logg.debug('tx {} passed to celery task {}'.format(tx_hash_hex, task_uuid))
                    # flag tx final once a filter has claimed it; receipt
                    # status 0 means the tx reverted
                    sig = celery.signature(
                        'set_final_status',
                        [tx_hash_hex, rcpt['blockNumber'], not rcpt['status']],
                        )
                    sig.apply_async()
                    break
            # checkpoint after every tx so a crash resumes where we left off
            next_tx = i + 1
            if next_tx == c:
                self.bc_cache.set(b.number+1, 0)
            else:
                self.bc_cache.set(b.number, next_tx)
        if c == 0:
            logg.info('synced block {} has no transactions'.format(b.number))
            self.bc_cache.set(b.number+1, 0)
        return b['number']


    def loop(self, interval):
        """Loop running until the "running" property of Syncer is set to False.

        Retrieves latest unprocessed blocks and processes them.

        :param interval: Delay in seconds until next attempt if no new blocks are found.
        :type interval: int
        """
        while self.running and Syncer.running_global:
            # reconnect per iteration so the cache session does not sit idle
            # through the sleep below
            self.bc_cache.connect()
            c = RpcClient(self.chain())
            logg.debug('loop execute')
            e = self.get(c.w3)
            logg.debug('got blocks {}'.format(e))
            for block in e:
                block_number = self.process(c.w3, block.hex())
                logg.info('processed block {} {}'.format(block_number, block.hex()))
            self.bc_cache.disconnect()
            time.sleep(interval)
        logg.info("Syncer no longer set to run, gracefully exiting")
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user