Compare commits

..

92 Commits

Author SHA1 Message Date
nolash 14ddab1b34
Add chain reader arg flags to trackerd 2021-11-03 13:03:23 +01:00
nolash 7217cb7f46
Bump version for cic-cache python package build 2021-11-03 11:19:44 +01:00
nolash 2ebcd3e3de Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-11-02 18:49:49 +01:00
nolash c440b049cc
Add config dirs 2021-11-02 16:35:44 +01:00
nolash 09034af5bc
Bump cic-eth version 2021-11-02 16:03:29 +01:00
nolash dc80bae673
Upgrade cic-eth in migrations 2021-11-02 15:31:00 +01:00
nolash d88ae00b72
Add celery cli args with defaults from redis 2021-10-31 07:58:35 +01:00
nolash 7a366edb9d
WIP rehabilitate cic-eth-inspect 2021-10-30 19:09:17 +02:00
nolash 0b912b99b6
Add role listing to cic-eth tag cli tool 2021-10-30 13:19:31 +02:00
nolash cbd4aef004
Add action confirm on sweep script 2021-10-30 10:25:39 +02:00
nolash 6f7f91780b
Add script to sweep gas from signer accounts 2021-10-30 09:02:04 +02:00
nolash 83ecdaf023
Connect token filter to tracker 2021-10-29 16:35:11 +02:00
nolash e2ef9b43c8
Reactivate cic-eth-tasker dependency for bootstrap 2021-10-29 15:58:34 +02:00
nolash 6e58e4e4de
Remove nasty residue from bootstrap 2021-10-29 14:40:06 +02:00
nolash f46c9b0e7d Merge remote-tracking branch 'origin/master' into lash/bloxberg-seeding 2021-10-29 11:39:40 +02:00
nolash 6ca3fd55d7
Add gas cache oracle connection for erc20 2021-10-29 08:45:42 +02:00
nolash 258ed420b8 Merge branch 'lash/tmp-bloxberg-seeding' into lash/bloxberg-seeding 2021-10-29 07:35:08 +02:00
nolash 1c022e9853
Added changes to wrong branch 2021-10-29 07:33:38 +02:00
nolash d35e144723
Register gas cache only for registered tokens 2021-10-29 07:00:25 +02:00
nolash fb953d0318
Add gas cache backend, test, filter 2021-10-28 21:45:47 +02:00
nolash 858bbdb69a Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-28 14:36:45 +02:00
nolash 66e23e4e20
Test config cleanup 2021-10-28 14:11:11 +02:00
nolash 546256c86a
Better gas gifting amounts and thresholds estimation, fix broken cic-eth imports 2021-10-28 13:34:39 +02:00
nolash d9720bd0aa Merge remote-tracking branch 'origin/lash/local-dev-improve' into lash/bloxberg-seeding 2021-10-28 05:41:27 +02:00
nolash e9e9f66d97
Correct wrong change for docker registries 2021-10-28 05:39:44 +02:00
nolash 0d640fab57 Merge remote-tracking branch 'origin/lash/local-dev-improve' into lash/bloxberg-seeding 2021-10-28 05:29:07 +02:00
nolash 4ce85bc824
Remove faulty default registry in dockerfiles 2021-10-28 05:27:13 +02:00
nolash ce67f83457
Remove faulty default registry in docker compose 2021-10-28 05:24:11 +02:00
nolash 13f2e17931
Remove accidental 0 value override for syncer offset to trackers 2021-10-28 05:18:54 +02:00
nolash f236234682 Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-27 16:58:38 +02:00
nolash 1f37632f0f
WIP Replace env vars in data-seeding with well-known 2021-10-27 16:56:03 +02:00
nolash 03d7518f8c Merge branch 'lash/local-dev-improve' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/local-dev-improve 2021-10-27 11:52:31 +02:00
nolash 67152d0df1
Replace KEYSTORE_PATH with WALLET_KEY_FILE in data seeding 2021-10-27 11:51:20 +02:00
PhilipWafula 9168322941
Revert base image changes. 2021-10-27 12:41:35 +03:00
PhilipWafula 2fbd338e24
Adds correct base image. 2021-10-27 11:44:23 +03:00
PhilipWafula c7d7f2a64d
Remove force reset. 2021-10-27 11:44:08 +03:00
PhilipWafula 16153df2f0
Resolve creation of phone dir when it already exists. 2021-10-27 11:43:35 +03:00
nolash 4391fa3aff Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-25 21:01:27 +02:00
nolash cd602dee49
Remove WIP docker compose file 2021-10-25 20:12:32 +02:00
nolash a548ba6fce
Chainlib upgrade to handle none receipts, rpc node debug output in bootstrap 2021-10-25 20:09:35 +02:00
nolash a6de7e9fe0 Merge remote-tracking branch 'origin/master' into lash/local-dev-improve 2021-10-20 20:02:19 +02:00
nolash e705a94873
Resolve notify/ussd dependency conflict 2021-10-20 10:07:19 +02:00
nolash 3923de0a81
Update pip args handling in notify 2021-10-19 23:01:55 +02:00
nolash 5c0250b5b9
Rehabilitate cic-cache db migration 2021-10-19 22:58:10 +02:00
nolash 3285d8dfe5
Implement asynchronous deploys in bootstrap 2021-10-19 22:08:17 +02:00
nolash 9d349f1579
Add debug level env var to bootstrap dev container 2021-10-19 19:54:59 +02:00
nolash 837a1770d1
Upgrade deps more chainlib in bootstrap 2021-10-19 10:10:39 +02:00
PhilipWafula 003febec9d
Bumps contract migration deps. 2021-10-19 10:38:21 +03:00
PhilipWafula f066a32ce8
Adds libffi-dev for local git-tea. 2021-10-19 10:38:08 +03:00
nolash ad493705ad
Upgrade deps 2021-10-18 17:16:28 +02:00
nolash b765c4ab88
More wrestling with chainlib-eth deps 2021-10-18 17:06:31 +02:00
nolash e4935d3b58 Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 16:49:58 +02:00
nolash f88f0e321b
Upgrade chainlib-eth dep 2021-10-18 16:48:14 +02:00
PhilipWafula 31fa721397
Add cic-notify container 2021-10-18 17:17:53 +03:00
PhilipWafula 16481da193
Merge remote-tracking branch 'origin/lash/split-migration' into lash/split-migration 2021-10-18 16:54:23 +03:00
PhilipWafula 97a48cd8c6
Improves ussd deps. 2021-10-18 16:53:38 +03:00
nolash 7732412341 Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 15:51:38 +02:00
nolash 649b124a61
Ugprade chainqueue dep 2021-10-18 15:50:45 +02:00
PhilipWafula 7601e3eeff
Corrects breakages in cic-ussd 2021-10-18 15:19:32 +03:00
PhilipWafula 60a9efc88b
Merge remote-tracking branch 'origin/lash/split-migration' into lash/split-migration 2021-10-18 15:18:33 +03:00
PhilipWafula 45011b58c4
Cleans up configs. 2021-10-18 15:11:31 +03:00
nolash f1a0b4ee7c Merge branch 'lash/split-migration' of gitlab.com:grassrootseconomics/cic-internal-integration into lash/split-migration 2021-10-18 14:10:52 +02:00
nolash c57abb7ad5
Upgrade deps in cic-eth, allow for new chain spec format 2021-10-18 14:08:39 +02:00
PhilipWafula 930a99c974
Bumps cic-types version. 2021-10-18 06:52:49 +03:00
PhilipWafula b0935caab8
Fixes imports. 2021-10-18 06:52:28 +03:00
nolash bdd5f6fcec
Update readme in data seeding 2021-10-17 19:37:29 +02:00
nolash a293c2460e
Consolidate dir handling in data seeding scripts 2021-10-17 19:27:15 +02:00
nolash 0ee6400d7d
WIP rehabilitate ussd builds 2021-10-17 18:32:08 +02:00
nolash 677fb346fd
Add data seeding preparation step, rehabilitation of non-custodial seeding 2021-10-17 18:05:00 +02:00
nolash ea3c75e755
Rehabilitate traffic script 2021-10-17 14:30:42 +02:00
nolash 0b2f22c416
Rehabilitate cic-user-server 2021-10-16 20:54:41 +02:00
nolash 24385ea27d
Rehabilitate cic-cache 2021-10-16 14:03:05 +02:00
nolash 9a154a8046
WIP rehabilitate cic-cache 2021-10-16 08:23:32 +02:00
nolash d3576c8ec7
Add eth retrier to new docker compose file 2021-10-16 07:08:44 +02:00
nolash 79ee2bf4ff
Add eth tracker, dispatcher to new docker compose file 2021-10-16 07:04:19 +02:00
nolash 89ac70371a
Remove single function worker in test 2021-10-16 00:18:08 +02:00
nolash 5ea0318b0b
Fix default token symbol config setting for aux 2021-10-15 23:21:57 +02:00
nolash 5dfb96ec0c
Add new cic-signer app 2021-10-15 23:11:00 +02:00
nolash 4634ac41df Merge remote-tracking branch 'origin/master' into lash/split-migration 2021-10-15 22:19:01 +02:00
nolash 97f4fe8ca7
refactor docker-compose cic-eth-tasker, bootstrap (aka contract migration) 2021-10-15 22:16:45 +02:00
nolash b36529f7fa
WIP local docker registry adaptations 2021-10-15 20:27:03 +02:00
nolash a6675f2348
Add environment sourcing for cic-eth-tasker docker compose 2021-10-15 18:52:37 +02:00
nolash e3116d74d6
No export 2021-10-15 12:54:16 +02:00
nolash c0bbdc9bec
Add missing file 2021-10-15 08:43:04 +02:00
nolash 396bd4f300
update preliminary readme 2021-10-15 08:38:01 +02:00
nolash 58547b4067
Bump cic-eth-registry 2021-10-15 07:44:50 +02:00
nolash 9009815d78
Add trust address to contract migration config, get cic-eth default token from registry 2021-10-14 21:31:04 +02:00
nolash 2da19f5819
Add basic connectivity config directives 2021-10-14 17:40:53 +02:00
nolash 3948d5aa40
Add custodial initialization 2021-10-14 17:18:49 +02:00
nolash ed432abb23
WIP refactor custodial initialization 2021-10-14 14:37:48 +02:00
nolash f251b8b729
Remove dead code 2021-10-14 11:35:08 +02:00
nolash 36e791e08a
Split contract migration into three separate steps 2021-10-14 11:33:50 +02:00
223 changed files with 2073 additions and 7858 deletions

View File

@@ -30,8 +30,6 @@ version:
#image: python:3.7-stretch
image: registry.gitlab.com/grassrootseconomics/cic-base-images/ci-version:b01318ae
stage: version
tags:
- integration
script:
- mkdir -p ~/.ssh && chmod 700 ~/.ssh
- ssh-keyscan gitlab.com >> ~/.ssh/known_hosts && chmod 644 ~/.ssh/known_hosts

View File

@@ -1,44 +0,0 @@
<!---
Please read this!
Before opening a new issue, make sure to search for keywords in the issues
filtered by the "bug" label:
- https://gitlab.com/groups/grassrootseconomics/-/issues?scope=all&state=all&label_name[]=bug
and verify the issue you're about to submit isn't a duplicate.
--->
### Summary
<!-- Summarize the bug encountered concisely. -->
### Steps to reproduce
<!-- Describe how one can reproduce the issue - this is very important. Please use an ordered list. -->
### Example Project
<!-- If possible, please create an example project here on GitLab.com that exhibits the problematic
behavior, and link to it here in the bug report. If you are using an older version of GitLab, this
will also determine whether the bug is fixed in a more recent version. -->
### What is the current *bug* behavior?
<!-- Describe what actually happens. -->
### What is the expected *correct* behavior?
<!-- Describe what you should see instead. -->
### Relevant logs and/or screenshots
<!-- Paste any relevant logs - please use code blocks (```) to format console output, logs, and code
as it's tough to read otherwise. -->
### Possible fixes
<!-- If you can, link to the line of code that might be responsible for the problem. -->
/label ~"bug"

View File

@@ -1,83 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at [INSERT EMAIL ADDRESS]. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
...Try to keep in mind the immortal
words of Bill and Ted, "Be excellent to each other."

View File

@@ -1,16 +0,0 @@
Hello and welcome to the CIC Stack repository. It is targeted for use with the Ethereum Virtual Machine and a USSD-capable telecom provider.
__To request a change to the code please fork this repository and submit a merge request.__
__If there is a Grassroots Economics Kanban Issue, please include it in your MR; it will help us track contributions. Karibu sana!__
__Visit the Development Kanban board here: https://gitlab.com/grassrootseconomics/cic-internal-integration/-/boards/2419764__
__Ask a question in our dev chat:__
[Mattermost](https://chat.grassrootseconomics.net/cic/channels/dev)
[Discord](https://discord.gg/XWunwAsX)
[Matrix, IRC soon?]

View File

@@ -1,117 +0,0 @@
# CORE TEAM CONTRIBUTION GUIDE
# 1. Transparency
1.1 Use work logs to reflect on work done, as well as to tell your peers about changes that may affect their own tasks
1.2 A work log SHOULD be submitted after a "unit of work" is complete.
1.2.1 A "unit of work" should not span more than one full day's worth of work.
1.2.2 A "unit of work" should be small enough that the log entries give useful insight.
1.3 Individual logs are reviewed in weekly meetings
<!--1.4 Bullet point list of topics and one or more sub-points describing each item in short sentences, eg;
```
- Core
* fixed foo
* fixed bar
- Frontend
* connected bar to baz
```-->
1.4 Work log format is defined in []()
1.5 Link to issue/MR in bullet point where appropriate
1.6
# 2. Code hygiene
2.1 Keep function names and variable names short
2.2 Keep code files, functions and test fixtures short
2.3 The less magic the better. Recombinable and replaceable is king
2.4 Group imports by `standard`, `external`, `local`, `test` - in that order (see the import sketch after this list)
2.5 Only auto-import when necessary, and always with a minimum of side-effects
2.6 Use custom errors. Let them bubble up
2.7 No logs in tight loops
2.8 Keep executable main routine minimal. Pass variables (do not use globals) in main business logic function
2.9 Test coverage MUST be kept higher than 90% after changes
2.10 Docstrings. Always. Always!
# 3. Versioning
3.1 Use [Semantic Versioning](https://semver.org/)
3.2 When merging code, explicit dependencies SHOULD NOT use pre-release versions
# 4. Issues
4.1 Issue title should use [Conventional Commit structure](https://www.conventionalcommits.org/en/v1.0.0-beta.2/)
4.2 Issues need proper problem statement
4.2.1. What is the current state
4.2.2. If current state is not behaving as expected, what was the expected state
4.2.3. What is the desired new state.
4.3 Issues need proper resolution statement
4.3.1. Bullet point list of short sentences describing practical steps to reach desired state
4.3.2. Bullet point list of external resources informing the issue and resolution
4.4 Tasks need to be appropriately labelled using GROUP labels.
# 5. Code submission
5.1 A branch and new MR is always created BEFORE THE WORK STARTS
5.2 An MR should solve ONE SINGLE PART of a problem
5.3 Every MR should have at least ONE ISSUE associated with it. Ideally issue can be closed when MR is merged
5.4 MRs should not be open for more than one week (during normal operation periods)
5.5 MR should ideally not be longer than 400 lines of changes of logic
5.6 MRs that MOVE or DELETE code should not CHANGE that same code in a single MR. Scope MOVEs and DELETEs in separate commits (or even better, separate MRs) for transparency
# 6. Code reviews
6.1 At least one peer review before merge
6.2 If MR is too long, evaluate whether this affects the quality of the review negatively. If it does, expect to be asked to split it up
6.3 Evaluate changes against associated issues' problem statement and proposed resolution steps. If there is a mismatch, either MR needs to change or issue needs to be amended accordingly
6.4 Make sure all technical debt introduced by MR is documented in issues. Add them according to criteria in section ISSUES if not
6.5 If CI is not working, reviewer MUST make sure code builds and runs
6.6 Behave!
6.6.1 Don't be a jerk
6.6.2 Don't block needlessly
6.6.3 Say please

LICENSE (661 lines removed)
View File

@@ -1,661 +0,0 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
cic-internal-integration
Copyright (C) 2021 Grassroots Economics
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

View File

@@ -1,19 +1,41 @@
# Community Inclusion Currency Stack (CIC Stack)
A custodial EVM wallet for executing transactions via USSD
# cic-internal-integration
## Getting started
This repo uses docker-compose and docker buildkit. Set the following environment variables to get started:
```
export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
```
To get started see [./apps/contract-migration/README.md](./apps/contract-migration/README.md)
start services, database, redis and local ethereum node
```
docker-compose up -d
```
## Documentation
Run app/contract-migration to deploy contracts
```
RUN_MASK=3 docker-compose up contract-migration
```
[https://docs.grassecon.org/software/](https://docs.grassecon.org/software/)
stop cluster
```
docker-compose down
```
stop cluster and delete data
```
docker-compose down -v --remove-orphans
```
rebuild an image
```
docker-compose up --build <service_name>
```
to delete the buildkit cache
```
docker builder prune --filter type=exec.cachemount
```

View File

@@ -1 +1 @@
include *requirements.txt cic_cache/data/config/* cic_cache/db/migrations/default/* cic_cache/db/migrations/default/versions/*
include *requirements.txt cic_cache/data/config/*

View File

@@ -1 +0,0 @@
from .cache import BloomCache

View File

@@ -17,4 +17,5 @@ class ArgumentParser(BaseArgumentParser):
self.add_argument('--offset', type=int, help='Start block height for initial history sync')
self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
if local_arg_flags & CICFlag.CHAIN:
self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
self.add_argument('--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
self.add_argument('--trust-address', type=str, dest='trust_address', action='append', help='Add trust address')

View File

@@ -39,6 +39,9 @@ class Config(BaseConfig):
if local_arg_flags & CICFlag.CHAIN:
local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')
trust_addresses = getattr(args, 'trust_address', None)
if trust_addresses != None:
local_args_override['CIC_TRUST_ADDRESS'] = ','.join(trust_addresses)
if local_arg_flags & CICFlag.CELERY:
local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')

View File

@@ -1,4 +1,4 @@
[cic]
registry_address =
trust_address =
health_modules =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas

View File

@@ -3,8 +3,7 @@ engine =
driver =
host =
port =
#name = cic-cache
prefix =
name = cic-cache
user =
password =
debug = 0

View File

@@ -9,26 +9,21 @@ from .list import (
tag_transaction,
add_tag,
)
from cic_cache.db.models.base import SessionBase
logg = logging.getLogger()
def dsn_from_config(config, name):
def dsn_from_config(config):
scheme = config.get('DATABASE_ENGINE')
if config.get('DATABASE_DRIVER') != None:
scheme += '+{}'.format(config.get('DATABASE_DRIVER'))
database_name = name
if config.get('DATABASE_PREFIX'):
database_name = '{}_{}'.format(config.get('DATABASE_PREFIX'), database_name)
dsn = ''
if config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.poolable = False
dsn = '{}:///{}'.format(
scheme,
database_name,
config.get('DATABASE_NAME'),
)
else:
@@ -38,7 +33,7 @@ def dsn_from_config(config, name):
config.get('DATABASE_PASSWORD'),
config.get('DATABASE_HOST'),
config.get('DATABASE_PORT'),
database_name,
config.get('DATABASE_NAME'),
)
logg.debug('parsed dsn from config: {}'.format(dsn))
return dsn

View File

@@ -5,11 +5,7 @@ import re
import base64
# external imports
from hexathon import (
add_0x,
strip_0x,
)
from chainlib.encode import TxHexNormalizer
from hexathon import add_0x
# local imports
from cic_cache.cache import (
@@ -20,72 +16,27 @@ from cic_cache.cache import (
logg = logging.getLogger(__name__)
#logg = logging.getLogger()
re_transactions_all_bloom = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_transactions_all_data = r'/txa/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
re_default_limit = r'/defaultlimit/?'
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
DEFAULT_LIMIT = 100
tx_normalize = TxHexNormalizer()
def parse_query_account(r):
address = strip_0x(r[1])
#address = tx_normalize.wallet_address(address)
limit = DEFAULT_LIMIT
g = r.groups()
if len(g) > 3:
limit = int(r[4])
if limit == 0:
limit = DEFAULT_LIMIT
offset = 0
if len(g) > 4:
offset = int(r[6])
logg.debug('account query is address {} offset {} limit {}'.format(address, offset, limit))
return (address, offset, limit,)
# r is an re.Match
def parse_query_any(r):
limit = DEFAULT_LIMIT
offset = 0
block_offset = None
block_end = None
if r.lastindex != None:
if r.lastindex > 0:
limit = int(r[1])
if r.lastindex > 1:
offset = int(r[2])
if r.lastindex > 2:
block_offset = int(r[3])
if r.lastindex > 3:
block_end = int(r[4])
if block_end < block_offset:
raise ValueError('cart before the horse, dude')
logg.debug('data query is offset {} limit {} block_offset {} block_end {}'.format(offset, limit, block_offset, block_end))
return (offset, limit, block_offset, block_end,)
def process_default_limit(session, env):
r = re.match(re_default_limit, env.get('PATH_INFO'))
if not r:
return None
return ('application/json', str(DEFAULT_LIMIT).encode('utf-8'),)
def process_transactions_account_bloom(session, env):
r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
if not r:
return None
logg.debug('match account bloom')
(address, offset, limit,) = parse_query_account(r)
address = r[1]
if r[2] == None:
address = add_0x(address)
offset = 0
if r.lastindex > 2:
offset = r[4]
limit = DEFAULT_LIMIT
if r.lastindex > 4:
limit = r[6]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
@ -108,9 +59,13 @@ def process_transactions_all_bloom(session, env):
r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
if not r:
return None
logg.debug('match all bloom')
(limit, offset, block_offset, block_end,) = parse_query_any(r)
offset = DEFAULT_LIMIT
if r.lastindex > 0:
offset = r[1]
limit = 0
if r.lastindex > 1:
limit = r[2]
c = BloomCache(session)
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
@ -133,16 +88,17 @@ def process_transactions_all_data(session, env):
r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
if not r:
return None
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
# return None
logg.debug('match all data')
if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
return None
logg.debug('got data request {}'.format(env))
(offset, limit, block_offset, block_end) = parse_query_any(r)
block_offset = r[1]
block_end = r[2]
if int(r[2]) < int(r[1]):
raise ValueError('cart before the horse, dude')
c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset, block_end, oldest=True) # oldest needs to be settable
(lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable
for r in tx_cache:
r['date_block'] = r['date_block'].timestamp()
@ -157,30 +113,3 @@ def process_transactions_all_data(session, env):
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
def process_transactions_account_data(session, env):
r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
if not r:
return None
logg.debug('match account data')
#if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
# return None
(address, offset, limit,) = parse_query_account(r)
c = DataCache(session)
(lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit)
for r in tx_cache:
r['date_block'] = r['date_block'].timestamp()
o = {
'low': lowest_block,
'high': highest_block,
'data': tx_cache,
}
j = json.dumps(o)
return ('application/json', j.encode('utf-8'),)
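
A small illustration of how the route regexes above slice a request path into query parameters, assuming the four-group re_transactions_all_bloom variant; the sample path is made up.

import re

re_transactions_all_bloom = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'

r = re.match(re_transactions_all_bloom, '/tx/50/10/420000/420100')
# the captured groups feed parse_query_any as limit, offset, block_offset, block_end
print(r.groups())  # ('50', '10', '420000', '420100')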

View File

@ -12,20 +12,21 @@ import cic_cache.cli
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
from cic_cache.runnable.daemons.query import (
process_default_limit,
process_transactions_account_bloom,
process_transactions_account_data,
process_transactions_all_bloom,
process_transactions_all_data,
)
import cic_cache.cli
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')
arg_flags = cic_cache.cli.argflag_std_read
local_arg_flags = cic_cache.cli.argflag_local_sync | cic_cache.cli.argflag_local_task
# process args
arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_task
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()
@ -34,7 +35,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database
dsn = dsn_from_config(config, 'cic_cache')
dsn = dsn_from_config(config)
SessionBase.connect(dsn, config.true('DATABASE_DEBUG'))
@ -46,11 +47,9 @@ def application(env, start_response):
session = SessionBase.create_session()
for handler in [
process_transactions_account_data,
process_transactions_account_bloom,
process_transactions_all_data,
process_transactions_all_bloom,
process_default_limit,
process_transactions_account_bloom,
]:
r = None
try:

View File

@ -3,7 +3,6 @@ import logging
import os
import sys
import argparse
import tempfile
# third-party imports
import celery
@ -29,7 +28,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database
dsn = dsn_from_config(config, 'cic_cache')
dsn = dsn_from_config(config)
SessionBase.connect(dsn)
# set up celery

View File

@ -40,7 +40,7 @@ logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
# process args
arg_flags = cic_cache.cli.argflag_std_base
arg_flags = cic_cache.cli.argflag_std_read
local_arg_flags = cic_cache.cli.argflag_local_sync
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
@ -50,7 +50,7 @@ args = argparser.parse_args()
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
# connect to database
dsn = dsn_from_config(config, 'cic_cache')
dsn = dsn_from_config(config)
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
# set up rpc

View File

@ -5,7 +5,7 @@ version = (
0,
2,
1,
'alpha.3',
'rc.1',
)
version_object = semver.VersionInfo(

View File

@ -0,0 +1,3 @@
[celery]
broker_url = redis:///
result_url = redis:///

View File

@ -0,0 +1,3 @@
[cic]
registry_address =
trust_address =

View File

@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

View File

@ -0,0 +1,3 @@
[celery]
broker_url = redis://localhost:63379
result_url = redis://localhost:63379

View File

@ -0,0 +1,3 @@
[cic]
registry_address =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C

View File

@ -0,0 +1,9 @@
[database]
NAME=cic_cache
USER=grassroots
PASSWORD=
HOST=localhost
PORT=63432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0

View File

@ -0,0 +1,4 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0

View File

@ -0,0 +1,2 @@
[bancor]
dir =

View File

@ -1,3 +1,4 @@
[cic]
registry_address =
chain_spec =
trust_address =

View File

@ -1,5 +1,5 @@
[database]
PREFIX=cic-cache-test
NAME=cic-cache-test
USER=postgres
PASSWORD=
HOST=localhost

View File

@ -0,0 +1,5 @@
[eth]
#ws_provider = ws://localhost:8546
#http_provider = http://localhost:8545
provider = http://localhost:8545
#chain_id =

View File

@ -1,4 +1,4 @@
openapi: "3.0.2"
openapi: "3.0.3"
info:
title: Grassroots Economics CIC Cache
description: Cache of processed transaction data from Ethereum blockchain and worker queues
@ -9,34 +9,17 @@ info:
email: will@grassecon.org
license:
name: GPLv3
version: 0.2.0
version: 0.1.0
paths:
/defaultlimit:
summary: The default limit value of result sets.
get:
tags:
- transactions
description:
Retrieve default limit
operationId: limit.default
responses:
200:
description: Limit query successful
content:
application/json:
schema:
$ref: "#/components/schemas/Limit"
/tx:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. The maximum number of transactions returned is given by the `/defaultlimit` API call.
/tx/{offset}/{limit}:
description: Bloom filter for batch of latest transactions
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest
operationId: tx.get
responses:
200:
description: Transaction query successful.
@ -46,153 +29,27 @@ paths:
$ref: "#/components/schemas/BlocksBloom"
/tx/{limit}:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.limit
responses:
200:
description: Transaction query successful. Results are ordered from newest to oldest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}:
summary: Bloom filter for batch of latest transactions
description: Generate a bloom filter of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range
responses:
200:
description: Transaction query successful. Results are ordered from newest to oldest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}/{block_offset}:
summary: Bloom filter for batch of transactions since a particular block.
description: Generate a bloom filter of the latest transactions since a particular block in the cache. The block parameter is inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range.block.offset
responses:
200:
description: Transaction query successful. Results are ordered from oldest to newest.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
/tx/{limit}/{offset}/{block_offset}/{block_end}:
summary: Bloom filter for batch of transactions within a particular block range.
description: Generate a bloom filter of the latest transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
/tx/{address}/{offset}/{limit}:
description: Bloom filter for batch of latest transactions by account
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.latest.range.block.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_end
in: path
required: true
schema:
type: integer
format: int32
/tx/{address}:
summary: Bloom filter for batch of latest transactions by account.
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user
operationId: tx.get
responses:
200:
description: Transaction query successful.
@ -201,30 +58,6 @@ paths:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
in: path
required: true
schema:
type: string
/tx/{address}/{limit}:
summary: Bloom filter for batch of latest transactions by account.
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
@ -232,317 +65,26 @@ paths:
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/tx/{address}/{limit}/{offset}:
summary: Bloom filter for batch of latest transactions by account
description: Generate a bloom filter of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: tx.get.user.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/BlocksBloom"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/txa:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. The maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
/txa/{limit}:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}:
summary: Cached data for latest transactions.
description: Return data entries of the latest transactions in the cache. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}/{block_offset}:
summary: Cached data for transactions since a particular block.
description: Return cached data entries of transactions since a particular block. The block parameter is inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range.block.offset
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
/txa/{limit}/{offset}/{block_offset}/{block_end}:
summary: Cached data for transactions within a particular block range.
description: Return cached data entries of transactions within a particular block range in the cache. The block parameters are inclusive. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.latest.range.block.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_offset
in: path
required: true
schema:
type: integer
format: int32
- name: block_end
in: path
required: true
schema:
type: integer
format: int32
/txa/{address}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
/txa/{address}/{limit}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user.limit
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
/txa/{address}/{limit}/{offset}:
summary: Cached data for batch of latest transactions by account.
description: Return cached data of the latest transactions where a specific account is the spender or beneficiary. If `limit` is 0, the maximum number of transactions returned is given by the `/defaultlimit` API call.
get:
tags:
- transactions
description:
Retrieve transactions
operationId: txa.get.user.range
responses:
200:
description: Transaction query successful.
content:
application/json:
schema:
$ref: "#/components/schemas/TransactionList"
parameters:
- name: address
in: path
required: true
schema:
type: string
- name: limit
in: path
required: true
schema:
type: integer
format: int32
- name: offset
in: path
required: true
schema:
type: integer
format: int32
components:
schemas:
Limit:
type: integer
format: int32
BlocksBloom:
type: object
properties:
low:
type: integer
type: int
format: int32
description: The lowest block number included in the filter
high:
type: integer
format: int32
description: The highest block number included in the filter
block_filter:
type: string
format: byte
@ -555,89 +97,6 @@ components:
type: string
description: Hashing algorithm (currently only using sha256)
filter_rounds:
type: integer
type: int
format: int32
description: Number of hash rounds used to create the filter
TransactionList:
type: object
properties:
low:
type: integer
format: int32
description: The lowest block number included in the result set
high:
type: integer
format: int32
description: The highest block number included in the filter
data:
type: array
description: Cached transaction data
items:
$ref: "#/components/schemas/Transaction"
Transaction:
type: object
properties:
block_number:
type: integer
format: int64
description: Block number transaction was included in.
tx_hash:
type: string
description: Transaction hash, in hex.
date_block:
type: integer
format: int32
description: Block timestamp.
sender:
type: string
description: Spender address, in hex.
recipient:
type: string
description: Beneficiary address, in hex.
from_value:
type: integer
format: int64
description: Value deducted from spender's balance.
to_value:
type: integer
format: int64
description: Value added to beneficiary's balance.
source_token:
type: string
description: Network address of token in which `from_value` is denominated.
destination_token:
type: string
description: Network address of token in which `to_value` is denominated.
success:
type: boolean
description: Network consensus state on whether the transaction was successful or not.
tx_type:
type: string
enum:
- erc20.faucet
- faucet.give_to
examples:
data_last:
summary: Get the latest cached transactions, using the server's default limit.
value: "/txa"
data_limit:
summary: Get the last 42 cached transactions.
value: "/txa/42"
data_range:
summary: Get the next 42 cached transactions, starting from the 13th (zero-indexed).
value: "/txa/42/13"
data_range_block_offset:
summary: Get the next 42 cached transactions, starting from block 1337 (inclusive).
value: "/txa/42/0/1337"
data_range_block_bounds:
summary: Get the next 42 cached transactions within blocks 1337 and 1453 (inclusive).
value: "/txa/42/0/1337/1453"
data_range_block_range:
summary: Get the next 42 cached transactions after the 13th, within blocks 1337 and 1453 (inclusive).
value: "/txa/42/13/1337/1453"

View File

@ -4,9 +4,9 @@ FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
COPY requirements.txt .
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple
ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
@ -14,9 +14,14 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt
COPY . .
COPY . .
RUN pip install . --extra-index-url $EXTRA_PIP_INDEX_URL
RUN python setup.py install
# ini files in the config directory define the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-cache/
# for db migrations
COPY ./aux/wait-for-it/wait-for-it.sh ./
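
The comments above describe ini config files that can be overridden by environment variables; below is a minimal sketch of that behaviour with the confini calls used elsewhere in this changeset. The config directory and the CICTEST prefix are placeholders borrowed from the test fixtures.

import os
import confini

# a prefixed environment variable overrides the corresponding [database] host value
os.environ['CICTEST_DATABASE_HOST'] = 'db.example.org'

config = confini.Config('/usr/local/etc/cic-cache', 'CICTEST')
config.process()
print(config.get('DATABASE_HOST'))  # db.example.org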

View File

@ -2,5 +2,5 @@
set -e
>&2 echo executing database migration
python scripts/migrate_cic_cache.py --migrations-dir /usr/local/share/cic-cache/alembic -vv
python scripts/migrate.py -c /usr/local/etc/cic-cache --migrations-dir /usr/local/share/cic-cache/alembic -vv
set +e

View File

@ -2,7 +2,7 @@
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net \
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r test_requirements.txt

View File

@ -1,15 +1,14 @@
alembic==1.4.2
confini~=0.5.3
confini~=0.4.2
uwsgi==2.0.19.1
moolb~=0.2.0
cic-eth-registry~=0.6.6
moolb~=0.1.1b2
cic-eth-registry~=0.6.1a1
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
chainsyncer[sql]~=0.0.7
erc20-faucet~=0.3.2
chainlib-eth~=0.0.15
eth-address-index~=0.2.4
okota~=0.2.5
chainsyncer[sql]>=0.0.7a3,<0.1.0
erc20-faucet>=0.3.2a2, <0.4.0
chainlib-eth>=0.0.10a20,<0.1.0
eth-address-index>=0.2.3a4,<0.3.0

View File

@ -1,55 +1,54 @@
#!/usr/bin/python3
# standard imports
#!/usr/bin/python
import os
import argparse
import logging
import re
# external imports
import alembic
from alembic.config import Config as AlembicConfig
import confini
# local imports
from cic_cache.db import dsn_from_config
import cic_cache.cli
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
# BUG: the dbdir doesn't work after script install
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(cic_cache.__file__)))
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
default_migrations_dir = os.path.join(dbdir, 'migrations')
migrationsdir = os.path.join(dbdir, 'migrations')
configdir = os.path.join(rootdir, 'cic_cache', 'data', 'config')
#config_dir = os.path.join('/usr/local/etc/cic-cache')
arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_sync
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
argparser.add_argument('-f', '--force', action='store_true', help='force action')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=default_migrations_dir, type=str, help='migrations directory')
argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()
extra_args = {
'reset': None,
'force': None,
'migrations_dir': None,
}
# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
if args.vv:
logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
logging.getLogger().setLevel(logging.INFO)
migrations_dir = os.path.join(config.get('_MIGRATIONS_DIR'), config.get('DATABASE_ENGINE', 'default'))
config = confini.Config(configdir, args.env_prefix, override_dirs=args.c)
config.process()
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))
migrations_dir = os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_dir):
logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
migrations_dir = os.path.join(args.migrations_dir, 'default')
# connect to database
dsn = dsn_from_config(config, 'cic_cache')
dsn = dsn_from_config(config)
logg.info('using migrations dir {}'.format(migrations_dir))

View File

@ -1,7 +1,6 @@
[metadata]
name = cic-cache
description = CIC Cache API and server
version = 0.3.0a2
author = Louis Holbrook
author_email = dev@holbrook.no
url = https://gitlab.com/grassrootseconomics/cic-eth
@ -35,7 +34,7 @@ packages =
cic_cache.runnable.daemons
cic_cache.runnable.daemons.filters
scripts =
./scripts/migrate_cic_cache.py
./scripts/migrate.py
[options.entry_points]
console_scripts =

View File

@ -1,39 +1,38 @@
from setuptools import setup
# import configparser
import configparser
import os
import time
# import time
from cic_cache.version import (
version_object,
version_string
)
# from cic_cache.version import (
# version_object,
# version_string
# )
#
# class PleaseCommitFirstError(Exception):
# pass
#
# def git_hash():
# import subprocess
# git_diff = subprocess.run(['git', 'diff'], capture_output=True)
# if len(git_diff.stdout) > 0:
# raise PleaseCommitFirstError()
# git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
# git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
# return git_hash_brief
#
# version_string = str(version_object)
#
# try:
# version_git = git_hash()
# version_string += '+build.{}'.format(version_git)
# except FileNotFoundError:
# time_string_pair = str(time.time()).split('.')
# version_string += '+build.{}{:<09d}'.format(
# time_string_pair[0],
# int(time_string_pair[1]),
# )
# print('final version string will be {}'.format(version_string))
class PleaseCommitFirstError(Exception):
pass
def git_hash():
import subprocess
git_diff = subprocess.run(['git', 'diff'], capture_output=True)
if len(git_diff.stdout) > 0:
raise PleaseCommitFirstError()
git_hash = subprocess.run(['git', 'rev-parse', 'HEAD'], capture_output=True)
git_hash_brief = git_hash.stdout.decode('utf-8')[:8]
return git_hash_brief
version_string = str(version_object)
try:
version_git = git_hash()
version_string += '+build.{}'.format(version_git)
except FileNotFoundError:
time_string_pair = str(time.time()).split('.')
version_string += '+build.{}{:<09d}'.format(
time_string_pair[0],
int(time_string_pair[1]),
)
print('final version string will be {}'.format(version_string))
requirements = []
f = open('requirements.txt', 'r')
@ -53,8 +52,9 @@ while True:
test_requirements.append(l.rstrip())
f.close()
setup(
# version=version_string,
version=version_string,
install_requires=requirements,
tests_require=test_requirements,
)

View File

@ -7,4 +7,4 @@ pytest-celery==0.0.0a1
eth_tester==0.5.0b3
py-evm==0.3.0a20
sarafu-faucet~=0.0.7a1
erc20-transfer-authorization~=0.3.6
erc20-transfer-authorization>=0.3.5a1,<0.4.0

View File

@ -6,7 +6,6 @@ import datetime
# external imports
import pytest
import moolb
from chainlib.encode import TxHexNormalizer
# local imports
from cic_cache import db
@ -43,8 +42,6 @@ def txs(
list_tokens,
):
tx_normalize = TxHexNormalizer()
session = init_database
tx_number = 13
@ -57,10 +54,10 @@ def txs(
tx_hash_first,
list_defaults['block'],
tx_number,
tx_normalize.wallet_address(list_actors['alice']),
tx_normalize.wallet_address(list_actors['bob']),
tx_normalize.executable_address(list_tokens['foo']),
tx_normalize.executable_address(list_tokens['foo']),
list_actors['alice'],
list_actors['bob'],
list_tokens['foo'],
list_tokens['foo'],
1024,
2048,
True,
@ -77,10 +74,10 @@ def txs(
tx_hash_second,
list_defaults['block']-1,
tx_number,
tx_normalize.wallet_address(list_actors['diane']),
tx_normalize.wallet_address(list_actors['alice']),
tx_normalize.executable_address(list_tokens['foo']),
tx_normalize.wallet_address(list_tokens['foo']),
list_actors['diane'],
list_actors['alice'],
list_tokens['foo'],
list_tokens['foo'],
1024,
2048,
False,
@ -106,8 +103,6 @@ def more_txs(
session = init_database
tx_normalize = TxHexNormalizer()
tx_number = 666
tx_hash = '0x' + os.urandom(32).hex()
tx_signed = '0x' + os.urandom(128).hex()
@ -120,10 +115,10 @@ def more_txs(
tx_hash,
list_defaults['block']+2,
tx_number,
tx_normalize.wallet_address(list_actors['alice']),
tx_normalize.wallet_address(list_actors['diane']),
tx_normalize.executable_address(list_tokens['bar']),
tx_normalize.executable_address(list_tokens['bar']),
list_actors['alice'],
list_actors['diane'],
list_tokens['bar'],
list_tokens['bar'],
2048,
4096,
False,

View File

@ -14,8 +14,7 @@ logg = logging.getLogger(__file__)
@pytest.fixture(scope='session')
def load_config():
config_dir = os.path.join(root_dir, 'config/test')
schema_config_dir = os.path.join(root_dir, 'cic_cache', 'data', 'config')
conf = confini.Config(schema_config_dir, 'CICTEST', override_dirs=config_dir)
conf = confini.Config(config_dir, 'CICTEST')
conf.process()
logg.debug('config {}'.format(conf))
return conf

View File

@ -24,15 +24,11 @@ def database_engine(
if load_config.get('DATABASE_ENGINE') == 'sqlite':
SessionBase.transactional = False
SessionBase.poolable = False
name = 'cic_cache'
database_name = name
if load_config.get('DATABASE_PREFIX'):
database_name = '{}_{}'.format(load_config.get('DATABASE_PREFIX'), database_name)
try:
os.unlink(database_name)
os.unlink(load_config.get('DATABASE_NAME'))
except FileNotFoundError:
pass
dsn = dsn_from_config(load_config, name)
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
return dsn

View File

@ -14,7 +14,7 @@ def test_api_all_data(
):
env = {
'PATH_INFO': '/txa/100/0/410000/420000',
'PATH_INFO': '/txa/410000/420000',
'HTTP_X_CIC_CACHE_MODE': 'all',
}
j = process_transactions_all_data(init_database, env)
@ -23,7 +23,7 @@ def test_api_all_data(
assert len(o['data']) == 2
env = {
'PATH_INFO': '/txa/100/0/420000/410000',
'PATH_INFO': '/txa/420000/410000',
'HTTP_X_CIC_CACHE_MODE': 'all',
}

View File

@ -6,7 +6,6 @@ import json
# external imports
import pytest
from chainlib.encode import TxHexNormalizer
# local imports
from cic_cache import db
@ -63,8 +62,6 @@ def test_cache_ranges(
session = init_database
tx_normalize = TxHexNormalizer()
oldest = list_defaults['block'] - 1
mid = list_defaults['block']
newest = list_defaults['block'] + 2
@ -103,39 +100,32 @@ def test_cache_ranges(
assert b[1] == mid
# now check when supplying account
account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account(account, 0, 100)
b = c.load_transactions_account(list_actors['alice'], 0, 100)
assert b[0] == oldest
assert b[1] == newest
account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account(account, 0, 100)
b = c.load_transactions_account(list_actors['bob'], 0, 100)
assert b[0] == mid
assert b[1] == mid
account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account(account, 0, 100)
b = c.load_transactions_account(list_actors['diane'], 0, 100)
assert b[0] == oldest
assert b[1] == newest
# add block filter to the mix
account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid
assert b[1] == newest
account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid
assert b[1] == newest
account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == mid
assert b[1] == mid
account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == oldest
assert b[1] == oldest
@ -150,8 +140,6 @@ def test_cache_ranges_data(
session = init_database
tx_normalize = TxHexNormalizer()
oldest = list_defaults['block'] - 1
mid = list_defaults['block']
newest = list_defaults['block'] + 2
@ -215,8 +203,7 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[1]
# now check when supplying account
account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account_with_data(account, 0, 100)
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
assert b[0] == oldest
assert b[1] == newest
assert len(b[2]) == 3
@ -224,15 +211,13 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[1]
assert b[2][2]['tx_hash'] == more_txs[2]
account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account_with_data(account, 0, 100)
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
assert b[0] == mid
assert b[1] == mid
assert len(b[2]) == 1
assert b[2][0]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account_with_data(account, 0, 100)
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
assert b[0] == oldest
assert b[1] == newest
assert len(b[2]) == 2
@ -240,31 +225,27 @@ def test_cache_ranges_data(
assert b[2][1]['tx_hash'] == more_txs[2]
# add block filter to the mix
account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid
assert b[1] == newest
assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == more_txs[0]
assert b[2][1]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['alice'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'])
b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
assert b[0] == mid
assert b[1] == newest
assert len(b[2]) == 2
assert b[2][0]['tx_hash'] == more_txs[0]
assert b[2][1]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['bob'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == mid
assert b[1] == mid
assert len(b[2]) == 1
assert b[2][0]['tx_hash'] == more_txs[1]
account = tx_normalize.wallet_address(list_actors['diane'])
b = c.load_transactions_account_with_data(account, 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
assert b[0] == oldest
assert b[1] == oldest
assert len(b[2]) == 1

View File

@ -82,7 +82,7 @@ def test_query_regex(
[
('alice', None, None, [(420000, 13), (419999, 42)]),
('alice', None, 1, [(420000, 13)]),
('alice', 1, 1, [(419999, 42)]), # 420000 == list_defaults['block']
('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
('alice', 2, None, []), # 420000 == list_defaults['block']
],
)
@ -107,11 +107,10 @@ def test_query_process_txs_account(
path_info = '/tx/user/0x' + strip_0x(actor)
if query_offset != None:
path_info += '/' + str(query_offset)
if query_limit == None:
query_limit = 100
path_info += '/' + str(query_limit)
if query_offset == None:
path_info += '/0'
if query_limit != None:
if query_offset == None:
path_info += '/0'
path_info += '/' + str(query_limit)
env = {
'PATH_INFO': path_info,
}
@ -193,7 +192,7 @@ def test_query_process_txs_bloom(
@pytest.mark.parametrize(
'query_block_start, query_block_end, query_match_count',
[
(1, 42, 0),
(None, 42, 0),
(420000, 420001, 1),
(419999, 419999, 1), # matches are inclusive
(419999, 420000, 2),
@ -212,7 +211,7 @@ def test_query_process_txs_data(
query_match_count,
):
path_info = '/txa/100/0'
path_info = '/txa'
if query_block_start != None:
path_info += '/' + str(query_block_start)
if query_block_end != None:
@ -228,5 +227,4 @@ def test_query_process_txs_data(
assert r != None
o = json.loads(r[1])
logg.debug('oo {}'.format(o))
assert len(o['data']) == query_match_count

View File

@ -1,5 +1,5 @@
celery==4.4.7
erc20-demurrage-token~=0.0.6
cic-eth-registry~=0.6.3
chainlib~=0.0.14
cic_eth~=0.12.6
erc20-demurrage-token~=0.0.5a3
cic-eth-registry~=0.6.1a6
chainlib~=0.0.9rc1
cic_eth~=0.12.4a11

View File

@ -1,6 +1,6 @@
[metadata]
name = cic-eth-aux-erc20-demurrage-token
version = 0.0.3
version = 0.0.2a7
description = cic-eth tasks supporting erc20 demurrage token
author = Louis Holbrook
author_email = dev@holbrook.no

View File

@ -1,4 +1,5 @@
SQLAlchemy==1.3.20
hexathon~=0.1.0
chainqueue~=0.0.6a4
eth-erc20~=0.1.5
cic-eth-registry>=0.6.1a6,<0.7.0
hexathon~=0.0.1a8
chainqueue>=0.0.4a6,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0

View File

@ -515,7 +515,7 @@ class Api(ApiBase):
:param password: Password to encode the account key with in the backend (careful, you will have to remember it)
:type password: str
:param register: Register the new account in accounts index backend
:type register: bool
:type password: bool
:returns: uuid of root task
:rtype: celery.Task
"""

View File

@ -63,32 +63,22 @@ class Config(BaseConfig):
config.get('REDIS_HOST'),
config.get('REDIS_PORT'),
)
db = getattr(args, 'redis_db', None)
if db != None:
db = str(db)
redis_url = (
'redis',
hostport,
db,
getattr(args, 'redis_db', None),
)
celery_config_url = urllib.parse.urlsplit(config.get('CELERY_BROKER_URL'))
hostport = urlhostmerge(
celery_config_url[1],
getattr(args, 'celery_host', None),
getattr(args, 'celery_port', None),
)
db = getattr(args, 'redis_db', None)
if db != None:
db = str(db)
celery_arg_url = (
getattr(args, 'celery_scheme', None),
hostport,
db,
getattr(args, 'celery_db', None),
)
celery_url = urlmerge(redis_url, celery_config_url, celery_arg_url)
celery_url_string = urllib.parse.urlunsplit(celery_url)
local_celery_args_override['CELERY_BROKER_URL'] = celery_url_string

View File

@ -22,7 +22,7 @@ from hexathon import (
from chainqueue.error import NotLocalTxError
from eth_erc20 import ERC20
from chainqueue.sql.tx import cache_tx_dict
from okota.token_index.index import to_identifier
from okota.token_index import to_identifier
# local imports
from cic_eth.db.models.base import SessionBase
@ -46,14 +46,13 @@ from cic_eth.task import (
from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.encode import tx_normalize
from cic_eth.eth.trust import verify_proofs
from cic_eth.error import SignerError
celery_app = celery.current_app
logg = logging.getLogger()
@celery_app.task(bind=True, base=CriticalWeb3Task)
def balance(self, tokens, holder_address, chain_spec_dict):
@celery_app.task(base=CriticalWeb3Task)
def balance(tokens, holder_address, chain_spec_dict):
"""Return token balances for a list of tokens for given address
:param tokens: Token addresses
@ -72,9 +71,8 @@ def balance(self, tokens, holder_address, chain_spec_dict):
for t in tokens:
address = t['address']
logg.debug('address {} {}'.format(address, holder_address))
gas_oracle = self.create_gas_oracle(rpc, min_price=self.min_fee_price)
token = ERC20Token(chain_spec, rpc, add_0x(address))
c = ERC20(chain_spec, gas_oracle=gas_oracle)
c = ERC20(chain_spec)
o = c.balance_of(address, holder_address, sender_address=caller_address)
r = rpc.do(o)
t['balance_network'] = c.parse_balance(r)
@ -397,8 +395,6 @@ def cache_transfer_data(
sender_address = tx_normalize.wallet_address(tx['from'])
recipient_address = tx_normalize.wallet_address(tx_data[0])
token_value = tx_data[1]
source_token_address = tx_normalize.executable_address(tx['to'])
destination_token_address = source_token_address
session = SessionBase.create_session()
@ -406,8 +402,8 @@ def cache_transfer_data(
'hash': tx_hash_hex,
'from': sender_address,
'to': recipient_address,
'source_token': source_token_address,
'destination_token': destination_token_address,
'source_token': tx['to'],
'destination_token': tx['to'],
'from_value': token_value,
'to_value': token_value,
}
@ -439,16 +435,14 @@ def cache_transfer_from_data(
spender_address = tx_data[0]
recipient_address = tx_data[1]
token_value = tx_data[2]
source_token_address = tx_normalize.executable_address(tx['to'])
destination_token_address = source_token_address
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': recipient_address,
'source_token': source_token_address,
'destination_token': destination_token_address,
'source_token': tx['to'],
'destination_token': tx['to'],
'from_value': token_value,
'to_value': token_value,
}
@ -480,16 +474,14 @@ def cache_approve_data(
sender_address = tx_normalize.wallet_address(tx['from'])
recipient_address = tx_normalize.wallet_address(tx_data[0])
token_value = tx_data[1]
source_token_address = tx_normalize.executable_address(tx['to'])
destination_token_address = source_token_address
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': sender_address,
'to': recipient_address,
'source_token': source_token_address,
'destination_token': destination_token_address,
'source_token': tx['to'],
'destination_token': tx['to'],
'from_value': token_value,
'to_value': token_value,
}

View File

@ -92,7 +92,7 @@ def apply_gas_value_cache_local(address, method, value, tx_hash, session=None):
if o == None:
o = GasCache(address, method, value, tx_hash)
elif value > o.value:
elif tx.gas_used > o.value:
o.value = value
o.tx_hash = strip_0x(tx_hash)
@ -386,6 +386,7 @@ def refill_gas(self, recipient_address, chain_spec_dict):
session.flush()
# finally determine the value to send
# TODO: must be dynamic; more gas if price went up.
refill_amount = 0
if not zero_amount:
refill_amount = self.safe_gas_refill_amount

View File

@ -72,7 +72,7 @@ def __balance_incoming_compatible(token_address, receiver_address):
status_compare = dead()
q = q.filter(Otx.status.op('&')(status_compare)==0)
# TODO: this can change the result for the recipient if tx is later obsoleted and resubmission is delayed.
#q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK)
q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK)
q = q.filter(TxCache.destination_token_address==token_address)
delta = 0
for r in q.all():

View File

@ -17,7 +17,7 @@ from cic_eth_registry.error import UnknownContractError
# local imports
from cic_eth.error import SeppukuError
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.util import CacheGasOracle, MaxGasOracle
from cic_eth.eth.util import CacheGasOracle
#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
@ -25,14 +25,12 @@ logg = logging.getLogger()
celery_app = celery.current_app
class BaseTask(celery.Task):
session_func = SessionBase.create_session
call_address = ZERO_ADDRESS
trusted_addresses = []
min_fee_price = 1
min_fee_limit = 30000
default_token_address = None
default_token_symbol = None
default_token_name = None
@ -41,28 +39,21 @@ class BaseTask(celery.Task):
def create_gas_oracle(self, conn, address=None, *args, **kwargs):
x = None
if address is None:
x = RPCGasOracle(
if address == None:
return RPCGasOracle(
conn,
code_callback=kwargs.get('code_callback', self.get_min_fee_limit),
code_callback=kwargs.get('code_callback'),
min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'),
)
else:
x = MaxGasOracle(conn)
x.code_callback = x.get_fee_units
return x
def get_min_fee_limit(self, code):
return self.min_fee_limit
def get_min_fee_limit(self, code):
return self.min_fee_limit
return CacheGasOracle(
conn,
address,
method=kwargs.get('method'),
min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'),
)
def create_session(self):
@ -87,7 +78,7 @@ class BaseTask(celery.Task):
)
s.apply_async()
class CriticalTask(BaseTask):
retry_jitter = True
retry_backoff = True
@ -99,7 +90,7 @@ class CriticalSQLAlchemyTask(CriticalTask):
sqlalchemy.exc.DatabaseError,
sqlalchemy.exc.TimeoutError,
sqlalchemy.exc.ResourceClosedError,
)
)
class CriticalWeb3Task(CriticalTask):
@ -107,7 +98,7 @@ class CriticalWeb3Task(CriticalTask):
ConnectionError,
)
safe_gas_threshold_amount = 60000 * 3
safe_gas_refill_amount = safe_gas_threshold_amount * 5
safe_gas_refill_amount = safe_gas_threshold_amount * 5
safe_gas_gifter_balance = safe_gas_threshold_amount * 5 * 100
@ -125,13 +116,13 @@ class CriticalSQLAlchemyAndSignerTask(CriticalTask):
sqlalchemy.exc.DatabaseError,
sqlalchemy.exc.TimeoutError,
sqlalchemy.exc.ResourceClosedError,
)
)
class CriticalWeb3AndSignerTask(CriticalWeb3Task):
autoretry_for = (
ConnectionError,
)
@celery_app.task()
def check_health(self):
pass

View File

@ -7,10 +7,17 @@ FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
#COPY cic-eth/requirements.txt .
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
--pre \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a7
COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url $PIP_INDEX_URL \
@ -18,8 +25,8 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
--extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt \
-r services_requirements.txt \
-r admin_requirements.txt
-r admin_requirements.txt
COPY . .
RUN python setup.py install
@ -33,6 +40,8 @@ RUN chmod 755 *.sh
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh
@ -57,8 +66,9 @@ ENTRYPOINT []
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY scripts/ scripts/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh

View File

@ -2,7 +2,7 @@
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r admin_requirements.txt \
-r services_requirements.txt \
-r test_requirements.txt

View File

@ -1,7 +1,4 @@
celery==4.4.7
chainlib-eth>=0.0.10a20,<0.1.0
semver==2.13.0
chainlib-eth~=0.0.15
urlybird~=0.0.1
cic-eth-registry~=0.6.6
cic-types~=0.2.1a8
cic-eth-aux-erc20-demurrage-token~=0.0.3
urlybird~=0.0.1a2

View File

@ -1,15 +1,16 @@
chainqueue~=0.0.6a4
chainsyncer[sql]~=0.0.7
chainqueue>=0.0.6a1,<0.1.0
chainsyncer[sql]>=0.0.7a3,<0.1.0
alembic==1.4.2
confini~=0.5.3
confini>=0.3.6rc4,<0.5.0
redis==3.5.3
hexathon~=0.1.0
hexathon~=0.0.1a8
pycryptodome==3.10.1
liveness~=0.0.1a7
eth-address-index~=0.2.4
eth-accounts-index~=0.1.2
erc20-faucet~=0.3.2
erc20-transfer-authorization~=0.3.6
sarafu-faucet~=0.0.7
moolb~=0.2.0
okota~=0.2.5
eth-address-index>=0.2.4a1,<0.3.0
eth-accounts-index>=0.1.2a3,<0.2.0
cic-eth-registry>=0.6.1a6,<0.7.0
erc20-faucet>=0.3.2a2,<0.4.0
erc20-transfer-authorization>=0.3.5a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0
moolb~=0.1.1b2
okota>=0.2.4a6,<0.3.0

View File

@ -1,7 +1,7 @@
[metadata]
name = cic-eth
#version = attr: cic_eth.version.__version_string__
version = 0.12.7
version = 0.12.5a2
description = CIC Network Ethereum interaction
author = Louis Holbrook
author_email = dev@holbrook.no

View File

@ -6,5 +6,4 @@ pytest-redis==2.0.0
redis==3.5.3
eth-tester==0.5.0b3
py-evm==0.3.0a20
eth-erc20~=0.1.5
erc20-transfer-authorization~=0.3.6
eth-erc20~=0.1.2a2

View File

@ -40,7 +40,6 @@ def test_filter_gas(
foo_token,
token_registry,
register_lookups,
register_tokens,
celery_session_worker,
cic_registry,
):
@ -70,7 +69,7 @@ def test_filter_gas(
tx = Tx(tx_src, block=block)
tx.apply_receipt(rcpt)
t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert t.get() == None
assert t == None
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)

View File

@ -2,7 +2,7 @@
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
-r admin_requirements.txt
-r services_requirements.txt
-r test_requirements.txt

View File

@ -288,6 +288,7 @@ def test_fix_nonce(
init_database.commit()
logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys()
assert len(ks) == 2

View File

@ -191,17 +191,11 @@ def test_tokens(
break
api_param = str(uuid.uuid4())
fp = os.path.join(CallbackTask.mmap_path, api_param)
f = open(fp, 'wb+')
f.write(b'\x00')
f.close()
api = Api(str(default_chain_spec), queue=None, callback_param=api_param, callback_task='cic_eth.pytest.mock.callback.test_callback')
t = api.tokens(['BAR'], proof=[[bar_token_declaration]])
r = t.get()
logg.debug('rr {} {}'.format(r, t.children))
while True:
fp = os.path.join(CallbackTask.mmap_path, api_param)
try:

View File

@ -35,26 +35,10 @@ from hexathon import strip_0x
from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError
from cic_eth.queue.tx import queue_create
from cic_eth.task import BaseTask
logg = logging.getLogger()
def test_task_gas_limit(
eth_rpc,
eth_signer,
default_chain_spec,
agent_roles,
celery_session_worker,
):
rpc = RPCConnection.connect(default_chain_spec, 'default')
gas_oracle = BaseTask().create_gas_oracle(rpc)
c = Gas(default_chain_spec, signer=eth_signer, gas_oracle=gas_oracle)
(tx_hash_hex, o) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 10, tx_format=TxFormat.RLP_SIGNED)
tx = unpack(bytes.fromhex(strip_0x(o)), default_chain_spec)
assert (tx['gas'], BaseTask.min_fee_price)
def test_task_check_gas_ok(
default_chain_spec,
eth_rpc,

View File

@ -143,7 +143,7 @@ def test_incoming_balance(
'converters': [],
}
b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 1000
assert b[0]['balance_incoming'] == 0
otx.readysend(session=init_database)
init_database.flush()
@ -152,8 +152,8 @@ def test_incoming_balance(
otx.sent(session=init_database)
init_database.commit()
#b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
#assert b[0]['balance_incoming'] == 1000
b = balance_incoming([token_data], recipient, default_chain_spec.asdict())
assert b[0]['balance_incoming'] == 1000
otx.success(block=1024, session=init_database)
init_database.commit()

View File

@ -1,5 +1,7 @@
chainqueue~=0.0.6a4
crypto-dev-signer>=0.4.15rc2,<=0.4.15
chainqueue>=0.0.5a3,<0.1.0
cic-eth-registry>=0.6.1a6,<0.7.0
redis==3.5.3
hexathon~=0.1.0
hexathon~=0.0.1a8
pycryptodome==3.10.1
pyxdg==0.27

View File

@ -1,9 +1,10 @@
[database]
name=cic_notify_test
user=
password=
host=localhost
port=
engine=sqlite
driver=pysqlite
debug=0
[DATABASE]
user = postgres
password =
host = localhost
port = 5432
name = /tmp/cic-notify.db
#engine = postgresql
#driver = psycopg2
engine = sqlite
driver = pysqlite

View File

@ -1,7 +0,0 @@
[report]
omit =
venv/*
scripts/*
cic_notify/db/migrations/*
cic_notify/runnable/*
cic_notify/version.py

View File

@ -3,7 +3,6 @@ import logging
import re
# third-party imports
import cic_notify.tasks.sms.db
from celery.app.control import Inspect
import celery
@ -14,16 +13,45 @@ app = celery.current_app
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
sms_tasks_matcher = r"^(cic_notify.tasks.sms)(\.\w+)?"
re_q = r'^cic-notify'
def get_sms_queue_tasks(app, task_prefix='cic_notify.tasks.sms.'):
host_queues = []
i = Inspect(app=app)
qs = i.active_queues()
for host in qs.keys():
for q in qs[host]:
if re.match(re_q, q['name']):
host_queues.append((host, q['name'],))
task_prefix_len = len(task_prefix)
queue_tasks = []
for (host, queue) in host_queues:
i = Inspect(app=app, destination=[host])
for tasks in i.registered_tasks().values():
for task in tasks:
if len(task) >= task_prefix_len and task[:task_prefix_len] == task_prefix:
queue_tasks.append((queue, task,))
return queue_tasks
class Api:
def __init__(self, queue: any = 'cic-notify'):
# TODO: Implement callback strategy
def __init__(self, queue=None):
"""
:param queue: The queue on which to execute notification tasks
:type queue: str
"""
self.queue = queue
self.sms_tasks = get_sms_queue_tasks(app)
logg.debug('sms tasks {}'.format(self.sms_tasks))
def sms(self, message: str, recipient: str):
def sms(self, message, recipient):
"""This function chains all sms tasks in order to send a message, log and persist said data to disk
:param message: The message to be sent to the recipient.
:type message: str
@ -32,9 +60,24 @@ class Api:
:return: a celery Task
:rtype: Celery.Task
"""
s_send = celery.signature('cic_notify.tasks.sms.africastalking.send', [message, recipient], queue=self.queue)
s_log = celery.signature('cic_notify.tasks.sms.log.log', [message, recipient], queue=self.queue)
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', [message, recipient], queue=self.queue)
signatures = [s_send, s_log, s_persist_notification]
return celery.group(signatures)()
signatures = []
for q in self.sms_tasks:
if not self.queue:
queue = q[0]
else:
queue = self.queue
signature = celery.signature(
q[1],
[
message,
recipient,
],
queue=queue,
)
signatures.append(signature)
t = celery.group(signatures)()
return t
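For context, a minimal usage sketch of the reworked Api above, assuming a configured Celery broker and cic-notify workers registered on a cic-notify queue; the recipient number is a placeholder:

# hedged usage sketch, not part of the diff above
from cic_notify.api import Api

api = Api()  # discovers registered cic_notify.tasks.sms.* tasks from the active queues
result = api.sms('Hello world.', '+254700000000')  # placeholder recipient
# result is a celery GroupResult; call result.get() to block until all chained tasks finish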


@ -2,7 +2,7 @@
[alembic]
# path to migration scripts
script_location = .
script_location = migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
@ -27,17 +27,28 @@ script_location = .
# sourceless = false
# version location specification; this defaults
# to ./versions. When using multiple version
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat ./versions
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgres+psycopg2://postgres@localhost/cic-notify
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic


@ -11,7 +11,7 @@ config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name, disable_existing_loggers=True)
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
@ -56,14 +56,11 @@ def run_migrations_online():
and associate a connection with the context.
"""
connectable = context.config.attributes.get("connection", None)
if connectable is None:
connectable = engine_from_config(
context.config.get_section(context.config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(


@ -7,7 +7,7 @@ import celery
celery_app = celery.current_app
logg = celery_app.log.get_default_logger()
local_logg = logging.getLogger()
local_logg = logging.getLogger(__name__)
@celery_app.task


@ -9,7 +9,7 @@ import semver
logg = logging.getLogger()
version = (0, 4, 0, 'alpha.12')
version = (0, 4, 0, 'alpha.11')
version_object = semver.VersionInfo(
major=version[0],


@ -6,7 +6,7 @@ FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
RUN apt-get install libffi-dev -y
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net
ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL=https://pypi.org/simple


@ -2,7 +2,7 @@
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net \
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r test_requirements.txt


@ -1,4 +1,4 @@
confini~=0.5.1
confini>=0.3.6rc4,<0.5.0
africastalking==1.2.3
SQLAlchemy==1.3.20
alembic==1.4.2


@ -1,9 +1,5 @@
Faker==11.1.0
faker-e164==0.1.0
pytest==6.2.5
pytest-celery~=0.0.0
pytest-mock==3.6.1
pysqlite3~=0.4.6
pytest-cov==3.0.0
pytest-alembic==0.7.0
requests-mock==1.9.3
pytest~=6.0.1
pytest-celery~=0.0.0a1
pytest-mock~=3.3.1
pysqlite3~=0.4.3
pytest-cov==2.10.1


@ -1,28 +0,0 @@
import pytest
def test_single_head_revision(alembic_runner):
heads = alembic_runner.heads
head_count = len(heads)
assert head_count == 1
def test_upgrade(alembic_runner):
try:
alembic_runner.migrate_up_to("head")
except RuntimeError:
pytest.fail('Failed to upgrade to the head revision.')
def test_up_down_consistency(alembic_runner):
try:
for revision in alembic_runner.history.revisions:
alembic_runner.migrate_up_to(revision)
except RuntimeError:
pytest.fail('Failed to upgrade through each revision individually.')
try:
for revision in reversed(alembic_runner.history.revisions):
alembic_runner.migrate_down_to(revision)
except RuntimeError:
pytest.fail('Failed to downgrade through each revision individually.')


@ -1,27 +0,0 @@
# standard imports
# external imports
from faker import Faker
from faker_e164.providers import E164Provider
# local imports
from cic_notify.db.enum import NotificationStatusEnum, NotificationTransportEnum
from cic_notify.db.models.notification import Notification
# test imports
from tests.helpers.phone import phone_number
def test_notification(init_database):
message = 'Hello world'
recipient = phone_number()
notification = Notification(NotificationTransportEnum.SMS, recipient, message)
init_database.add(notification)
init_database.commit()
notification = init_database.query(Notification).get(1)
assert notification.status == NotificationStatusEnum.UNKNOWN
assert notification.recipient == recipient
assert notification.message == message
assert notification.transport == NotificationTransportEnum.SMS


@ -1,38 +0,0 @@
# standard imports
import os
# third-party imports
# local imports
from cic_notify.db import dsn_from_config
def test_dsn_from_config(load_config):
"""
"""
# test dsn for other db formats
overrides = {
'DATABASE_PASSWORD': 'password',
'DATABASE_DRIVER': 'psycopg2',
'DATABASE_ENGINE': 'postgresql'
}
load_config.dict_override(dct=overrides, dct_description='Override values to test different db formats.')
scheme = f'{load_config.get("DATABASE_ENGINE")}+{load_config.get("DATABASE_DRIVER")}'
dsn = dsn_from_config(load_config)
assert dsn == f"{scheme}://{load_config.get('DATABASE_USER')}:{load_config.get('DATABASE_PASSWORD')}@{load_config.get('DATABASE_HOST')}:{load_config.get('DATABASE_PORT')}/{load_config.get('DATABASE_NAME')}"
# undoes overrides to revert engine and drivers to sqlite
overrides = {
'DATABASE_PASSWORD': '',
'DATABASE_DRIVER': 'pysqlite',
'DATABASE_ENGINE': 'sqlite'
}
load_config.dict_override(dct=overrides, dct_description='Override values to test different db formats.')
# test dsn for sqlite engine
dsn = dsn_from_config(load_config)
scheme = f'{load_config.get("DATABASE_ENGINE")}+{load_config.get("DATABASE_DRIVER")}'
assert dsn == f'{scheme}:///{load_config.get("DATABASE_NAME")}'
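The removed test above pins down the DSN formats dsn_from_config is expected to produce; the following is an illustrative sketch of that construction only, since the real cic_notify.db.dsn_from_config may handle defaults and quoting differently:

# illustrative sketch of the DSN construction exercised by the removed test above
def build_dsn(config):
    scheme = f"{config.get('DATABASE_ENGINE')}+{config.get('DATABASE_DRIVER')}"
    if config.get('DATABASE_ENGINE') == 'sqlite':
        # sqlite DSNs carry only the database file path
        return f"{scheme}:///{config.get('DATABASE_NAME')}"
    return (f"{scheme}://{config.get('DATABASE_USER')}:{config.get('DATABASE_PASSWORD')}"
            f"@{config.get('DATABASE_HOST')}:{config.get('DATABASE_PORT')}"
            f"/{config.get('DATABASE_NAME')}")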


@ -1,75 +0,0 @@
# standard imports
import logging
import os
# external imports
import pytest
import requests_mock
# local imports
from cic_notify.error import NotInitializedError, AlreadyInitializedError, NotificationSendError
from cic_notify.tasks.sms.africastalking import AfricasTalkingNotifier
# test imports
from tests.helpers.phone import phone_number
def test_africas_talking_notifier(africastalking_response, caplog):
caplog.set_level(logging.DEBUG)
with pytest.raises(NotInitializedError) as error:
AfricasTalkingNotifier()
assert str(error.value) == ''
api_key = os.urandom(24).hex()
sender_id = 'bar'
username = 'sandbox'
AfricasTalkingNotifier.initialize(username, api_key, sender_id)
africastalking_notifier = AfricasTalkingNotifier()
assert africastalking_notifier.sender_id == sender_id
assert africastalking_notifier.initiated is True
with pytest.raises(AlreadyInitializedError) as error:
AfricasTalkingNotifier.initialize(username, api_key, sender_id)
assert str(error.value) == ''
with requests_mock.Mocker(real_http=False) as request_mocker:
message = 'Hello world.'
recipient = phone_number()
africastalking_response.get('SMSMessageData').get('Recipients')[0]['number'] = recipient
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert f'Africastalking response sender-id {africastalking_response}' in caplog.text
africastalking_notifier.sender_id = None
africastalking_notifier.send(message, recipient)
assert f'africastalking response no-sender-id {africastalking_response}' in caplog.text
with pytest.raises(NotificationSendError) as error:
status = 'InvalidPhoneNumber'
status_code = 403
africastalking_response.get('SMSMessageData').get('Recipients')[0]['status'] = status
africastalking_response.get('SMSMessageData').get('Recipients')[0]['statusCode'] = status_code
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert str(error.value) == f'Sending notification failed due to: {status}'
with pytest.raises(NotificationSendError) as error:
recipients = []
status = 'InsufficientBalance'
africastalking_response.get('SMSMessageData')['Recipients'] = recipients
africastalking_response.get('SMSMessageData')['Message'] = status
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert str(error.value) == f'Unexpected number of recipients: {len(recipients)}. Status: {status}'
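The removed test above exercises the AfricasTalkingNotifier lifecycle; a minimal sketch of that flow, with placeholder credentials (real values come from service configuration):

# illustrative sketch of the initialize-then-send flow covered by the removed test above
from cic_notify.tasks.sms.africastalking import AfricasTalkingNotifier

AfricasTalkingNotifier.initialize('sandbox', 'placeholder-api-key', 'placeholder-sender-id')
notifier = AfricasTalkingNotifier()
notifier.send('Hello world.', '+254700000000')  # placeholder recipient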


@ -1,26 +0,0 @@
# standard imports
# external imports
import celery
# local imports
from cic_notify.db.enum import NotificationStatusEnum, NotificationTransportEnum
from cic_notify.db.models.notification import Notification
# test imports
from tests.helpers.phone import phone_number
def test_persist_notification(celery_session_worker, init_database):
message = 'Hello world.'
recipient = phone_number()
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', (message, recipient)
)
s_persist_notification.apply_async().get()
notification = Notification.session.query(Notification).filter_by(recipient=recipient).first()
assert notification.status == NotificationStatusEnum.UNKNOWN
assert notification.recipient == recipient
assert notification.message == message
assert notification.transport == NotificationTransportEnum.SMS


@ -1,21 +0,0 @@
# standard imports
import logging
# external imports
import celery
# local imports
# test imports
from tests.helpers.phone import phone_number
def test_log(caplog, celery_session_worker):
message = 'Hello world.'
recipient = phone_number()
caplog.set_level(logging.INFO)
s_log = celery.signature(
'cic_notify.tasks.sms.log.log', [message, recipient]
)
s_log.apply_async().get()
assert f'message to {recipient}: {message}' in caplog.text


@ -1,24 +0,0 @@
# standard imports
# external imports
import celery
# local imports
from cic_notify.api import Api
# test imports
from tests.helpers.phone import phone_number
def test_api(celery_session_worker, mocker):
mocked_group = mocker.patch('celery.group')
message = 'Hello world.'
recipient = phone_number()
s_send = celery.signature('cic_notify.tasks.sms.africastalking.send', [message, recipient], queue=None)
s_log = celery.signature('cic_notify.tasks.sms.log.log', [message, recipient], queue=None)
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', [message, recipient], queue=None)
signatures = [s_send, s_log, s_persist_notification]
api = Api(queue=None)
api.sms(message, recipient)
mocked_group.assert_called_with(signatures)


@ -1,13 +1,31 @@
# standard imports
import sys
import os
import pytest
import logging
# third party imports
import confini
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir)
# local imports
from cic_notify.db.models.base import SessionBase
#from transport.notification import AfricastalkingNotification
# test imports
# fixtures
from tests.fixtures_config import *
from tests.fixtures_celery import *
from tests.fixtures_database import *
from .fixtures.celery import *
from .fixtures.config import *
from .fixtures.database import *
from .fixtures.result import *
logg = logging.getLogger()
#@pytest.fixture(scope='session')
#def africastalking_notification(
# load_config,
# ):
# return AfricastalkingNotificationTransport(load_config)
#


@ -1,32 +0,0 @@
# standard imports
import os
import logging
# external imports
import pytest
from confini import Config
logg = logging.getLogger(__file__)
fixtures_dir = os.path.dirname(__file__)
root_directory = os.path.dirname(os.path.dirname(fixtures_dir))
@pytest.fixture(scope='session')
def alembic_config():
migrations_directory = os.path.join(root_directory, 'cic_notify', 'db', 'migrations', 'default')
file = os.path.join(migrations_directory, 'alembic.ini')
return {
'file': file,
'script_location': migrations_directory
}
@pytest.fixture(scope='session')
def load_config():
config_directory = os.path.join(root_directory, '.config/test')
config = Config(default_dir=config_directory)
config.process()
logg.debug('config loaded\n{}'.format(config))
return config


@ -1,54 +0,0 @@
# standard imports
import os
# third-party imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig
# local imports
from cic_notify.db import dsn_from_config
from cic_notify.db.models.base import SessionBase, create_engine
from .config import root_directory
@pytest.fixture(scope='session')
def alembic_engine(load_config):
data_source_name = dsn_from_config(load_config)
return create_engine(data_source_name)
@pytest.fixture(scope='session')
def database_engine(load_config):
if load_config.get('DATABASE_ENGINE') == 'sqlite':
try:
os.unlink(load_config.get('DATABASE_NAME'))
except FileNotFoundError:
pass
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn)
return dsn
@pytest.fixture(scope='function')
def init_database(load_config, database_engine):
db_directory = os.path.join(root_directory, 'cic_notify', 'db')
migrations_directory = os.path.join(db_directory, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_directory):
migrations_directory = os.path.join(db_directory, 'migrations', 'default')
session = SessionBase.create_session()
alembic_config = AlembicConfig(os.path.join(migrations_directory, 'alembic.ini'))
alembic_config.set_main_option('sqlalchemy.url', database_engine)
alembic_config.set_main_option('script_location', migrations_directory)
alembic.command.downgrade(alembic_config, 'base')
alembic.command.upgrade(alembic_config, 'head')
yield session
session.commit()
session.close()


@ -1,24 +0,0 @@
# standard imports
# external imports
import pytest
# local imports
# test imports
@pytest.fixture(scope="function")
def africastalking_response():
return {
"SMSMessageData": {
"Message": "Sent to 1/1 Total Cost: KES 0.8000",
"Recipients": [{
"statusCode": 101,
"number": "+254711XXXYYY",
"status": "Success",
"cost": "KES 0.8000",
"messageId": "ATPid_SampleTxnId123"
}]
}
}


@ -37,6 +37,12 @@ def celery_config():
shutil.rmtree(rq)
@pytest.fixture(scope='session')
def celery_worker_parameters():
return {
# 'queues': ('cic-notify'),
}
@pytest.fixture(scope='session')
def celery_enable_logging():
return True


@ -0,0 +1,20 @@
# standard imports
import os
import logging
# third-party imports
import pytest
import confini
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
logg = logging.getLogger(__file__)
@pytest.fixture(scope='session')
def load_config():
config_dir = os.path.join(root_dir, '.config/test')
conf = confini.Config(config_dir, 'CICTEST')
conf.process()
logg.debug('config {}'.format(conf))
return conf


@ -0,0 +1,48 @@
# standard imports
import os
# third-party imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig
# local imports
from cic_notify.db import SessionBase
from cic_notify.db import dsn_from_config
@pytest.fixture(scope='session')
def database_engine(
load_config,
):
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn)
return dsn
@pytest.fixture(scope='function')
def init_database(
load_config,
database_engine,
):
rootdir = os.path.dirname(os.path.dirname(__file__))
dbdir = os.path.join(rootdir, 'cic_notify', 'db')
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrationsdir):
migrationsdir = os.path.join(dbdir, 'migrations', 'default')
session = SessionBase.create_session()
ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', database_engine)
ac.set_main_option('script_location', migrationsdir)
alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')
yield session
session.commit()
session.close()


@ -1,16 +0,0 @@
# standard imports
# external imports
from faker import Faker
from faker_e164.providers import E164Provider
# local imports
# test imports
fake = Faker()
fake.add_provider(E164Provider)
def phone_number() -> str:
return fake.e164('KE')


@ -0,0 +1,34 @@
# standard imports
import json
# third party imports
import pytest
import celery
# local imports
from cic_notify.tasks.sms import db
from cic_notify.tasks.sms import log
def test_log_notification(
celery_session_worker,
):
recipient = '+25412121212'
content = 'bar'
s_log = celery.signature('cic_notify.tasks.sms.log.log')
t = s_log.apply_async(args=[recipient, content])
r = t.get()
def test_db_notification(
init_database,
celery_session_worker,
):
recipient = '+25412121213'
content = 'foo'
s_db = celery.signature('cic_notify.tasks.sms.db.persist_notification')
t = s_db.apply_async(args=[recipient, content])
r = t.get()


@ -8,7 +8,7 @@ RUN apt-get install libffi-dev -y
COPY requirements.txt .
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net"
ARG EXTRA_PIP_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG PIP_INDEX_URL="https://pypi.org/simple"
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \


@ -3,5 +3,4 @@ omit =
venv/*
scripts/*
cic_ussd/db/migrations/*
cic_ussd/runnable/*
cic_ussd/version.py
cic_ussd/runnable/*


@ -2,7 +2,7 @@
import json
import logging
from typing import Union, Optional
from typing import Optional
# third-party imports
from cic_eth.api import Api
@ -14,7 +14,7 @@ from cic_ussd.account.transaction import from_wei
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.error import CachedDataNotFoundError
logg = logging.getLogger(__file__)
logg = logging.getLogger()
def get_balances(address: str,
@ -43,7 +43,7 @@ def get_balances(address: str,
:return: A list containing balance data if called synchronously. | None
:rtype: list | None
"""
logg.debug(f'retrieving {token_symbol} balance for address: {address}')
logg.debug(f'retrieving balance for address: {address}')
if asynchronous:
cic_eth_api = Api(
chain_str=chain_str,
@ -60,13 +60,11 @@ def get_balances(address: str,
return balance_request_task.get()
def calculate_available_balance(balances: dict, decimals: int) -> float:
def calculate_available_balance(balances: dict) -> float:
"""This function calculates an account's balance at a specific point in time by computing the difference from the
outgoing balance and the sum of the incoming and network balances.
:param balances: incoming, network and outgoing balances.
:type balances: dict
:param decimals:
:type decimals: int
:return: Token value of the available balance.
:rtype: float
"""
@ -75,7 +73,7 @@ def calculate_available_balance(balances: dict, decimals: int) -> float:
network_balance = balances.get('balance_network')
available_balance = (network_balance + incoming_balance) - outgoing_balance
return from_wei(decimals=decimals, value=available_balance)
return from_wei(value=available_balance)
def get_adjusted_balance(balance: int, chain_str: str, timestamp: int, token_symbol: str):
@ -96,25 +94,24 @@ def get_adjusted_balance(balance: int, chain_str: str, timestamp: int, token_sym
return demurrage_api.get_adjusted_balance(token_symbol, balance, timestamp).result
def get_cached_available_balance(decimals: int, identifier: Union[list, bytes]) -> float:
def get_cached_available_balance(blockchain_address: str) -> float:
"""This function attempts to retrieve balance data from the redis cache.
:param decimals:
:type decimals: int
:param identifier: An identifier needed to create a unique pointer to a balances resource.
:type identifier: bytes | list
:param blockchain_address: Ethereum address of an account.
:type blockchain_address: str
:raises CachedDataNotFoundError: No cached balance data could be found.
:return: Operational balance of an account.
:rtype: float
"""
key = cache_data_key(identifier=identifier, salt=MetadataPointer.BALANCES)
identifier = bytes.fromhex(blockchain_address)
key = cache_data_key(identifier, salt=MetadataPointer.BALANCES)
cached_balances = get_cached_data(key=key)
if cached_balances:
return calculate_available_balance(balances=json.loads(cached_balances), decimals=decimals)
return calculate_available_balance(json.loads(cached_balances))
else:
raise CachedDataNotFoundError(f'No cached available balance at {key}')
raise CachedDataNotFoundError(f'No cached available balance for address: {blockchain_address}')
def get_cached_adjusted_balance(identifier: Union[list, bytes]):
def get_cached_adjusted_balance(identifier: bytes):
"""
:param identifier:
:type identifier:
@ -123,22 +120,3 @@ def get_cached_adjusted_balance(identifier: Union[list, bytes]):
"""
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
return get_cached_data(key)
def get_account_tokens_balance(blockchain_address: str, chain_str: str, token_symbols_list: list):
"""
:param blockchain_address:
:type blockchain_address:
:param chain_str:
:type chain_str:
:param token_symbols_list:
:type token_symbols_list:
:return:
:rtype:
"""
for token_symbol in token_symbols_list:
get_balances(address=blockchain_address,
chain_str=chain_str,
token_symbol=token_symbol,
asynchronous=True,
callback_param=f'{blockchain_address},{token_symbol}')
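To illustrate the arithmetic in calculate_available_balance above, a hypothetical worked example with values in wei and 18 decimals assumed purely for illustration:

# hypothetical figures; 18 token decimals assumed for illustration only
balances = {
    'balance_network': 5_000_000_000_000_000_000,   # settled on chain
    'balance_incoming': 1_000_000_000_000_000_000,  # pending incoming
    'balance_outgoing': 2_000_000_000_000_000_000,  # pending outgoing
}
available = (balances['balance_network'] + balances['balance_incoming']) - balances['balance_outgoing']
print(available / 10 ** 18)  # 4.0 tokens available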


@ -1,22 +0,0 @@
# standard imports
# external imports
# local imports
class Guardianship:
guardians: list = []
@classmethod
def load_system_guardians(cls, guardians_file: str):
with open(guardians_file, 'r') as system_guardians:
cls.guardians = [line.strip() for line in system_guardians]
def is_system_guardian(self, phone_number: str):
"""
:param phone_number:
:type phone_number:
:return:
:rtype:
"""
return phone_number in self.guardians

Some files were not shown because too many files have changed in this diff.