Compare commits
	
		
			No commits in common. "master" and "3dc9dffb26ff1eeabcbbb68a90566adc02852f79" have entirely different histories.
		
	
	
		
			master
			...
			3dc9dffb26
		
	
		
							
								
								
									
										1
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @ -5,4 +5,3 @@ build/ | |||||||
| dist/ | dist/ | ||||||
| *.html | *.html | ||||||
| *.egg-info/ | *.egg-info/ | ||||||
| .venv |  | ||||||
|  | |||||||
							
								
								
									
										49
									
								
								CHANGELOG
									
									
									
									
									
								
							
							
						
						
									
										49
									
								
								CHANGELOG
									
									
									
									
									
								
							| @ -1,52 +1,3 @@ | |||||||
| - 0.2.3 |  | ||||||
| 	* Update wrong license file |  | ||||||
| - 0.2.2 |  | ||||||
| 	* Change license to AGPL3 and copyright waived to public domain |  | ||||||
| - 0.2.1 |  | ||||||
| 	* Implement shep 0.3.0 |  | ||||||
| 	* Implement chainlib 0.4.x |  | ||||||
| - 0.2.0 |  | ||||||
| 	* Implement chainlib 0.3.0 |  | ||||||
| - 0.1.16 |  | ||||||
| 	* Queue list cli tool |  | ||||||
| 	* State parser cli tool |  | ||||||
| 	* Provide pluggable renderer capability for queue list cli tool |  | ||||||
| 	* Move path and state query parsing to settings module |  | ||||||
| 	* Add queue path and digest parameters to base config |  | ||||||
| - 0.1.15 |  | ||||||
| 	* Upgrade shep to avoid sync in persist set |  | ||||||
| - 0.1.14 |  | ||||||
| 	* Upgrade shep to handle exception in filestore list |  | ||||||
| - 0.1.13 |  | ||||||
| 	* Remove sync on each get |  | ||||||
| 	* Upgrade shep to guarantee atomic state lock state |  | ||||||
| - 0.1.12 |  | ||||||
| 	* Raise correct exception from index store exists check |  | ||||||
| - 0.1.11 |  | ||||||
| 	* Allow for sync skip in store instantiation |  | ||||||
| - 0.1.10 |  | ||||||
| 	* Improve logging |  | ||||||
| - 0.1.9 |  | ||||||
| 	* Upgrade deps |  | ||||||
| - 0.1.8 |  | ||||||
| 	* Upgrade deps |  | ||||||
| - 0.1.7  |  | ||||||
| 	* Improve logging |  | ||||||
| - 0.1.6 |  | ||||||
| 	* Sort upcoming queue item chronologically |  | ||||||
| 	* Add unit testing for upcoming query method |  | ||||||
| - 0.1.5 |  | ||||||
| 	* Add reserved state check method |  | ||||||
| - 0.1.4 |  | ||||||
| 	* Dependency cleanups |  | ||||||
| - 0.1.3 |  | ||||||
| 	* Add CLI args and config handling, settings object |  | ||||||
| - 0.1.2 |  | ||||||
| 	* Add CLI inspection tools |  | ||||||
| - 0.1.1 |  | ||||||
| 	*  |  | ||||||
| - 0.1.0 |  | ||||||
| 	* Replace state transitions with shep |  | ||||||
| - 0.0.3 | - 0.0.3 | ||||||
| 	* cli tool for listing queue by address | 	* cli tool for listing queue by address | ||||||
| 	* ensure lowercase hex input in db | 	* ensure lowercase hex input in db | ||||||
|  | |||||||
							
								
								
									
										141
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										141
									
								
								LICENSE
									
									
									
									
									
								
							| @ -1,5 +1,5 @@ | |||||||
|                     GNU AFFERO GENERAL PUBLIC LICENSE |                     GNU GENERAL PUBLIC LICENSE | ||||||
|                        Version 3, 19 November 2007 |                        Version 3, 29 June 2007 | ||||||
| 
 | 
 | ||||||
|  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> |  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> | ||||||
|  Everyone is permitted to copy and distribute verbatim copies |  Everyone is permitted to copy and distribute verbatim copies | ||||||
| @ -7,15 +7,17 @@ | |||||||
| 
 | 
 | ||||||
|                             Preamble |                             Preamble | ||||||
| 
 | 
 | ||||||
|   The GNU Affero General Public License is a free, copyleft license for |   The GNU General Public License is a free, copyleft license for | ||||||
| software and other kinds of works, specifically designed to ensure | software and other kinds of works. | ||||||
| cooperation with the community in the case of network server software. |  | ||||||
| 
 | 
 | ||||||
|   The licenses for most software and other practical works are designed |   The licenses for most software and other practical works are designed | ||||||
| to take away your freedom to share and change the works.  By contrast, | to take away your freedom to share and change the works.  By contrast, | ||||||
| our General Public Licenses are intended to guarantee your freedom to | the GNU General Public License is intended to guarantee your freedom to | ||||||
| share and change all versions of a program--to make sure it remains free | share and change all versions of a program--to make sure it remains free | ||||||
| software for all its users. | software for all its users.  We, the Free Software Foundation, use the | ||||||
|  | GNU General Public License for most of our software; it applies also to | ||||||
|  | any other work released this way by its authors.  You can apply it to | ||||||
|  | your programs, too. | ||||||
| 
 | 
 | ||||||
|   When we speak of free software, we are referring to freedom, not |   When we speak of free software, we are referring to freedom, not | ||||||
| price.  Our General Public Licenses are designed to make sure that you | price.  Our General Public Licenses are designed to make sure that you | ||||||
| @ -24,34 +26,44 @@ them if you wish), that you receive source code or can get it if you | |||||||
| want it, that you can change the software or use pieces of it in new | want it, that you can change the software or use pieces of it in new | ||||||
| free programs, and that you know you can do these things. | free programs, and that you know you can do these things. | ||||||
| 
 | 
 | ||||||
|   Developers that use our General Public Licenses protect your rights |   To protect your rights, we need to prevent others from denying you | ||||||
| with two steps: (1) assert copyright on the software, and (2) offer | these rights or asking you to surrender the rights.  Therefore, you have | ||||||
| you this License which gives you legal permission to copy, distribute | certain responsibilities if you distribute copies of the software, or if | ||||||
| and/or modify the software. | you modify it: responsibilities to respect the freedom of others. | ||||||
| 
 | 
 | ||||||
|   A secondary benefit of defending all users' freedom is that |   For example, if you distribute copies of such a program, whether | ||||||
| improvements made in alternate versions of the program, if they | gratis or for a fee, you must pass on to the recipients the same | ||||||
| receive widespread use, become available for other developers to | freedoms that you received.  You must make sure that they, too, receive | ||||||
| incorporate.  Many developers of free software are heartened and | or can get the source code.  And you must show them these terms so they | ||||||
| encouraged by the resulting cooperation.  However, in the case of | know their rights. | ||||||
| software used on network servers, this result may fail to come about. |  | ||||||
| The GNU General Public License permits making a modified version and |  | ||||||
| letting the public access it on a server without ever releasing its |  | ||||||
| source code to the public. |  | ||||||
| 
 | 
 | ||||||
|   The GNU Affero General Public License is designed specifically to |   Developers that use the GNU GPL protect your rights with two steps: | ||||||
| ensure that, in such cases, the modified source code becomes available | (1) assert copyright on the software, and (2) offer you this License | ||||||
| to the community.  It requires the operator of a network server to | giving you legal permission to copy, distribute and/or modify it. | ||||||
| provide the source code of the modified version running there to the |  | ||||||
| users of that server.  Therefore, public use of a modified version, on |  | ||||||
| a publicly accessible server, gives the public access to the source |  | ||||||
| code of the modified version. |  | ||||||
| 
 | 
 | ||||||
|   An older license, called the Affero General Public License and |   For the developers' and authors' protection, the GPL clearly explains | ||||||
| published by Affero, was designed to accomplish similar goals.  This is | that there is no warranty for this free software.  For both users' and | ||||||
| a different license, not a version of the Affero GPL, but Affero has | authors' sake, the GPL requires that modified versions be marked as | ||||||
| released a new version of the Affero GPL which permits relicensing under | changed, so that their problems will not be attributed erroneously to | ||||||
| this license. | authors of previous versions. | ||||||
|  | 
 | ||||||
|  |   Some devices are designed to deny users access to install or run | ||||||
|  | modified versions of the software inside them, although the manufacturer | ||||||
|  | can do so.  This is fundamentally incompatible with the aim of | ||||||
|  | protecting users' freedom to change the software.  The systematic | ||||||
|  | pattern of such abuse occurs in the area of products for individuals to | ||||||
|  | use, which is precisely where it is most unacceptable.  Therefore, we | ||||||
|  | have designed this version of the GPL to prohibit the practice for those | ||||||
|  | products.  If such problems arise substantially in other domains, we | ||||||
|  | stand ready to extend this provision to those domains in future versions | ||||||
|  | of the GPL, as needed to protect the freedom of users. | ||||||
|  | 
 | ||||||
|  |   Finally, every program is threatened constantly by software patents. | ||||||
|  | States should not allow patents to restrict development and use of | ||||||
|  | software on general-purpose computers, but in those that do, we wish to | ||||||
|  | avoid the special danger that patents applied to a free program could | ||||||
|  | make it effectively proprietary.  To prevent this, the GPL assures that | ||||||
|  | patents cannot be used to render the program non-free. | ||||||
| 
 | 
 | ||||||
|   The precise terms and conditions for copying, distribution and |   The precise terms and conditions for copying, distribution and | ||||||
| modification follow. | modification follow. | ||||||
| @ -60,7 +72,7 @@ modification follow. | |||||||
| 
 | 
 | ||||||
|   0. Definitions. |   0. Definitions. | ||||||
| 
 | 
 | ||||||
|   "This License" refers to version 3 of the GNU Affero General Public License. |   "This License" refers to version 3 of the GNU General Public License. | ||||||
| 
 | 
 | ||||||
|   "Copyright" also means copyright-like laws that apply to other kinds of |   "Copyright" also means copyright-like laws that apply to other kinds of | ||||||
| works, such as semiconductor masks. | works, such as semiconductor masks. | ||||||
| @ -537,45 +549,35 @@ to collect a royalty for further conveying from those to whom you convey | |||||||
| the Program, the only way you could satisfy both those terms and this | the Program, the only way you could satisfy both those terms and this | ||||||
| License would be to refrain entirely from conveying the Program. | License would be to refrain entirely from conveying the Program. | ||||||
| 
 | 
 | ||||||
|   13. Remote Network Interaction; Use with the GNU General Public License. |   13. Use with the GNU Affero General Public License. | ||||||
| 
 |  | ||||||
|   Notwithstanding any other provision of this License, if you modify the |  | ||||||
| Program, your modified version must prominently offer all users |  | ||||||
| interacting with it remotely through a computer network (if your version |  | ||||||
| supports such interaction) an opportunity to receive the Corresponding |  | ||||||
| Source of your version by providing access to the Corresponding Source |  | ||||||
| from a network server at no charge, through some standard or customary |  | ||||||
| means of facilitating copying of software.  This Corresponding Source |  | ||||||
| shall include the Corresponding Source for any work covered by version 3 |  | ||||||
| of the GNU General Public License that is incorporated pursuant to the |  | ||||||
| following paragraph. |  | ||||||
| 
 | 
 | ||||||
|   Notwithstanding any other provision of this License, you have |   Notwithstanding any other provision of this License, you have | ||||||
| permission to link or combine any covered work with a work licensed | permission to link or combine any covered work with a work licensed | ||||||
| under version 3 of the GNU General Public License into a single | under version 3 of the GNU Affero General Public License into a single | ||||||
| combined work, and to convey the resulting work.  The terms of this | combined work, and to convey the resulting work.  The terms of this | ||||||
| License will continue to apply to the part which is the covered work, | License will continue to apply to the part which is the covered work, | ||||||
| but the work with which it is combined will remain governed by version | but the special requirements of the GNU Affero General Public License, | ||||||
| 3 of the GNU General Public License. | section 13, concerning interaction through a network will apply to the | ||||||
|  | combination as such. | ||||||
| 
 | 
 | ||||||
|   14. Revised Versions of this License. |   14. Revised Versions of this License. | ||||||
| 
 | 
 | ||||||
|   The Free Software Foundation may publish revised and/or new versions of |   The Free Software Foundation may publish revised and/or new versions of | ||||||
| the GNU Affero General Public License from time to time.  Such new versions | the GNU General Public License from time to time.  Such new versions will | ||||||
| will be similar in spirit to the present version, but may differ in detail to | be similar in spirit to the present version, but may differ in detail to | ||||||
| address new problems or concerns. | address new problems or concerns. | ||||||
| 
 | 
 | ||||||
|   Each version is given a distinguishing version number.  If the |   Each version is given a distinguishing version number.  If the | ||||||
| Program specifies that a certain numbered version of the GNU Affero General | Program specifies that a certain numbered version of the GNU General | ||||||
| Public License "or any later version" applies to it, you have the | Public License "or any later version" applies to it, you have the | ||||||
| option of following the terms and conditions either of that numbered | option of following the terms and conditions either of that numbered | ||||||
| version or of any later version published by the Free Software | version or of any later version published by the Free Software | ||||||
| Foundation.  If the Program does not specify a version number of the | Foundation.  If the Program does not specify a version number of the | ||||||
| GNU Affero General Public License, you may choose any version ever published | GNU General Public License, you may choose any version ever published | ||||||
| by the Free Software Foundation. | by the Free Software Foundation. | ||||||
| 
 | 
 | ||||||
|   If the Program specifies that a proxy can decide which future |   If the Program specifies that a proxy can decide which future | ||||||
| versions of the GNU Affero General Public License can be used, that proxy's | versions of the GNU General Public License can be used, that proxy's | ||||||
| public statement of acceptance of a version permanently authorizes you | public statement of acceptance of a version permanently authorizes you | ||||||
| to choose that version for the Program. | to choose that version for the Program. | ||||||
| 
 | 
 | ||||||
| @ -633,29 +635,40 @@ the "copyright" line and a pointer to where the full notice is found. | |||||||
|     Copyright (C) <year>  <name of author> |     Copyright (C) <year>  <name of author> | ||||||
| 
 | 
 | ||||||
|     This program is free software: you can redistribute it and/or modify |     This program is free software: you can redistribute it and/or modify | ||||||
|     it under the terms of the GNU Affero General Public License as published by |     it under the terms of the GNU General Public License as published by | ||||||
|     the Free Software Foundation, either version 3 of the License, or |     the Free Software Foundation, either version 3 of the License, or | ||||||
|     (at your option) any later version. |     (at your option) any later version. | ||||||
| 
 | 
 | ||||||
|     This program is distributed in the hope that it will be useful, |     This program is distributed in the hope that it will be useful, | ||||||
|     but WITHOUT ANY WARRANTY; without even the implied warranty of |     but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the |     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|     GNU Affero General Public License for more details. |     GNU General Public License for more details. | ||||||
| 
 | 
 | ||||||
|     You should have received a copy of the GNU Affero General Public License |     You should have received a copy of the GNU General Public License | ||||||
|     along with this program.  If not, see <https://www.gnu.org/licenses/>. |     along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| Also add information on how to contact you by electronic and paper mail. | Also add information on how to contact you by electronic and paper mail. | ||||||
| 
 | 
 | ||||||
|   If your software can interact with users remotely through a computer |   If the program does terminal interaction, make it output a short | ||||||
| network, you should also make sure that it provides a way for users to | notice like this when it starts in an interactive mode: | ||||||
| get its source.  For example, if your program is a web application, its | 
 | ||||||
| interface could display a "Source" link that leads users to an archive |     <program>  Copyright (C) <year>  <name of author> | ||||||
| of the code.  There are many ways you could offer source, and different |     This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. | ||||||
| solutions will be better for different programs; see section 13 for the |     This is free software, and you are welcome to redistribute it | ||||||
| specific requirements. |     under certain conditions; type `show c' for details. | ||||||
|  | 
 | ||||||
|  | The hypothetical commands `show w' and `show c' should show the appropriate | ||||||
|  | parts of the General Public License.  Of course, your program's commands | ||||||
|  | might be different; for a GUI interface, you would use an "about box". | ||||||
| 
 | 
 | ||||||
|   You should also get your employer (if you work as a programmer) or school, |   You should also get your employer (if you work as a programmer) or school, | ||||||
| if any, to sign a "copyright disclaimer" for the program, if necessary. | if any, to sign a "copyright disclaimer" for the program, if necessary. | ||||||
| For more information on this, and how to apply and follow the GNU AGPL, see | For more information on this, and how to apply and follow the GNU GPL, see | ||||||
| <https://www.gnu.org/licenses/>. | <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  |   The GNU General Public License does not permit incorporating your program | ||||||
|  | into proprietary programs.  If your program is a subroutine library, you | ||||||
|  | may consider it more useful to permit linking proprietary applications with | ||||||
|  | the library.  If this is what you want to do, use the GNU Lesser General | ||||||
|  | Public License instead of this License.  But first, please read | ||||||
|  | <https://www.gnu.org/licenses/why-not-lgpl.html>. | ||||||
|  | |||||||
| @ -1 +1 @@ | |||||||
| include *requirements.txt LICENSE WAIVER WAIVER.asc CHANGELOG chainqueue/db/migrations/default/* chainqueue/db/migrations/default/versions/* chainqueue/db/migrations/default/versions/src/* chainqueue/data/config/* | include *requirements.txt LICENSE chainqueue/db/migrations/default/* chainqueue/db/migrations/default/versions/* chainqueue/db/migrations/default/versions/src/* chainqueue/data/config/* | ||||||
|  | |||||||
							
								
								
									
										17
									
								
								WAIVER
									
									
									
									
									
								
							
							
						
						
									
										17
									
								
								WAIVER
									
									
									
									
									
								
							| @ -1,17 +0,0 @@ | |||||||
| # Copyright waiver for the python package "chainqueue" |  | ||||||
| 
 |  | ||||||
| I dedicate any and all copyright interest in this software to the |  | ||||||
| public domain. I make this dedication for the benefit of the public at |  | ||||||
| large and to the detriment of my heirs and successors. I intend this |  | ||||||
| dedication to be an overt act of relinquishment in perpetuity of all |  | ||||||
| present and future rights to this software under copyright law. |  | ||||||
| 
 |  | ||||||
| To the best of my knowledge and belief, my contributions are either |  | ||||||
| originally authored by me or are derived from prior works which I have |  | ||||||
| verified are also in the public domain and are not subject to claims |  | ||||||
| of copyright by other parties. |  | ||||||
| 
 |  | ||||||
| To the best of my knowledge and belief, no individual, business, |  | ||||||
| organization, government, or other entity has any copyright interest |  | ||||||
| in my contributions, and I affirm that I will not make contributions |  | ||||||
| that are otherwise encumbered. |  | ||||||
							
								
								
									
										29
									
								
								WAIVER.asc
									
									
									
									
									
								
							
							
						
						
									
										29
									
								
								WAIVER.asc
									
									
									
									
									
								
							| @ -1,29 +0,0 @@ | |||||||
| -----BEGIN PGP MESSAGE----- |  | ||||||
| 
 |  | ||||||
| owGVU3tQFHUcvztPxAW1KZNp4vETfJUXaCgRoiQIeGkEiIBOJrt7v7v7cXe7yz64 |  | ||||||
| LhDGfPAoKmi0A8dHmRQlA4xnIeTENT2YkhzIkEc1XlHkCIeTSo+5iX67B2r1V3/c |  | ||||||
| zO3+vt/P87evzpulClQv7hvtrPz47TR18yyeCsjbqM9NzaaLxj+KAiks5+CRySwC |  | ||||||
| O4mKIQ+MLA9EMwScQzSzDOBI2kKaIIikzSRiiiQowUiC0AMDNCCaFCEgGQf+GQBp |  | ||||||
| tQL6NhhiRMhDQf6D0ZAABNYo2kkeApGV4QlOoqyIBgbWhmGjgR7YSAv0j05DI8w+ |  | ||||||
| I4aCDDQiEbBGvzb/MikSVpI3QYXfj4uXRR7ZIKPM2hzADBEvKAOCRNNQEFhekOlk |  | ||||||
| gfIWJiTuIsQolIwHWJyFCEhaweGhVfaOBLOCjD1xkOegKCHRIZ9j7wSH7cqHMpVR |  | ||||||
| EiVsVYlC8Cu7OwKJMeCg74RlJe3RBJHDTlsVZrRbGNZuhYZpgxQWAY06+YBmGeyS |  | ||||||
| kmTJ2ByGhAjv8gSLARGD5eBOJNwfD/GeA9ggwEHKc5gYt4wV8qwNcDzCr+0sbxGA |  | ||||||
| 3YxoM87FTBZDAntHRoTH5BXSKrD+Gm8H72/NXzueYFgRp0sVQpwWNktbSWQTCOzh |  | ||||||
| jkUsgpUV4vvEiwgK/8MvI7MbUDEySKRVByhJQAzuUYfNmkgGPa8UpwMmuTFG7kcn |  | ||||||
| m/Wz4Se5IjMpKPf0v/eTwDb+HahOodcD0mhEvA2LJEX8ZEf4gstOlYv6jwVCGZGT |  | ||||||
| UFjtSMCFMLRkozCHIZqo0sRqVepAVdiixdrAZeNvcI+EV3z2Q0XWzFc5WyN/iypi |  | ||||||
| 7j0zb5JjFkxtzw3Ze2SFIdU192jctk7fS5u7J7qbvloaVRHS8XXt2Cde32+Tq4eu |  | ||||||
| tpz0ZI6csJwrG6gY/X54cLT0/l+butJ8aOLyqZjBd9Z+9+k6b15NeOm16JWxwZ09 |  | ||||||
| Ne9rD6lfpi62ORtLhOX1I5q2xvFvvnhlfuLCa+VrHo59ZkDzRKj7mHt9YnN2lfYM |  | ||||||
| ashqWpLyQHlvxvzE9qSmi5k7+9eeKtBcab1x4nq8yRtZPRaz0FVTsOWpnHlPX1ab |  | ||||||
| g1Z9uyW2uC3GVUVpbzyaUP5LfVJyxINHJnoGrfp2XWj2a40FtYdu9oTteiG10t36 |  | ||||||
| Yq73w9VLb1aMhs1putosUYf7un5ekORtpPdtvbD1fF9HUG2n7UpR+gr1+fVs1s7D |  | ||||||
| hVROiNsUc4kuOdDb8lCE1Du4YZORI/S7XXXtUZungj0NjZXHb3mmPIk/Hs3fVVA2 |  | ||||||
| 4HTOOTu748uGeovxOjdS9+Zf5fUB3SX3habmfw4SawMdnu4z1X0qX+dbf9hsaVmu |  | ||||||
| jncDjxlTTqdb9jjiziE3uTx+E1Aty3iv9INWV513X7hTuLUoklRFJE/sWDnslvqT |  | ||||||
| ug6M7n/8bMNkW0vc7xt0zu2PbQvYfVz75HA6CNpzYSiljA2wcoMZYxt/sqw6+OcO |  | ||||||
| Pj5p6PXM59Ytydib0H8pqNdXWVm9ZlKjPjl177O+BE9ATiGfuz+4KD3f2SflVZWn |  | ||||||
| nuaEvwE= |  | ||||||
| =lGEV |  | ||||||
| -----END PGP MESSAGE----- |  | ||||||
| @ -1,3 +0,0 @@ | |||||||
| from .state import Status |  | ||||||
| from .entry import QueueEntry |  | ||||||
| from .store import Store |  | ||||||
							
								
								
									
										117
									
								
								chainqueue/adapters/base.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										117
									
								
								chainqueue/adapters/base.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,117 @@ | |||||||
|  | # standard imports | ||||||
|  | import datetime | ||||||
|  | 
 | ||||||
|  | # local imports | ||||||
|  | from chainqueue.enum import StatusBits | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Adapter: | ||||||
|  |     """Base class defining interface to be implemented by chainqueue adapters. | ||||||
|  | 
 | ||||||
|  |     The chainqueue adapter collects the following actions: | ||||||
|  | 
 | ||||||
|  |     - add: add a transaction to the queue | ||||||
|  |     - upcoming: get queued transactions ready to be sent to network | ||||||
|  |     - dispatch: send a queued transaction to the network | ||||||
|  |     - translate: decode details of a transaction | ||||||
|  |     - create_session, release_session: session management to control queue state integrity | ||||||
|  | 
 | ||||||
|  |     :param backend: Chainqueue backend | ||||||
|  |     :type backend: TODO - abstract backend class. Must implement get, create_session, release_session | ||||||
|  |     :param pending_retry_threshold: seconds delay before retrying a transaction stalled in the newtork | ||||||
|  |     :type pending_retry_threshold: int | ||||||
|  |     :param error_retry_threshold: seconds delay before retrying a transaction that incurred a recoverable error state | ||||||
|  |     :type error_retry_threshold: int | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def __init__(self, backend, pending_retry_threshold=0, error_retry_threshold=0): | ||||||
|  |         self.backend = backend | ||||||
|  |         self.pending_retry_threshold = datetime.timedelta(pending_retry_threshold) | ||||||
|  |         self.error_retry_threshold = datetime.timedelta(error_retry_threshold) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def add(self, bytecode, chain_spec, session=None): | ||||||
|  |         """Add a transaction to the queue. | ||||||
|  | 
 | ||||||
|  |         :param bytecode: Transaction wire format bytecode, in hex | ||||||
|  |         :type bytecode: str | ||||||
|  |         :param chain_spec: Chain spec to use for transaction decode | ||||||
|  |         :type chain_spec: chainlib.chain.ChainSpec | ||||||
|  |         :param session: Backend state integrity session | ||||||
|  |         :type session: varies | ||||||
|  |         """ | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def translate(self, bytecode, chain_spec): | ||||||
|  |         """Decode details of a transaction. | ||||||
|  | 
 | ||||||
|  |         :param bytecode: Transaction wire format bytecode, in hex | ||||||
|  |         :type bytecode: str | ||||||
|  |         :param chain_spec: Chain spec to use for transaction decode | ||||||
|  |         :type chain_spec: chainlib.chain.ChainSpec | ||||||
|  |         """ | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def get(self, tx_hash, chain_spec, session=None): | ||||||
|  |         """Retrieve serialized transaction represented by the given transaction hash. | ||||||
|  | 
 | ||||||
|  |         :param chain_spec: Chain spec to use for transaction decode | ||||||
|  |         :type chain_spec: chainlib.chain.ChainSpec | ||||||
|  |         :param tx_hash: Transaction hash, in hex | ||||||
|  |         :type tx_hash: str | ||||||
|  |         :param session: Backend state integrity session | ||||||
|  |         :type session: varies | ||||||
|  |         """ | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def dispatch(self, chain_spec, rpc, tx_hash, signed_tx, session=None): | ||||||
|  |         """Send a queued transaction to the network. | ||||||
|  | 
 | ||||||
|  |         :param chain_spec: Chain spec to use to identify the transaction network | ||||||
|  |         :type chain_spec: chainlib.chain.ChainSpec | ||||||
|  |         :param rpc: RPC connection to use for transaction send | ||||||
|  |         :type rpc: chainlib.connection.RPCConnection | ||||||
|  |         :param tx_hash: Transaction hash (checksum of transaction), in hex | ||||||
|  |         :type tx_hash: str | ||||||
|  |         :param signed_tx: Transaction wire format bytecode, in hex | ||||||
|  |         :type signed_tx: str | ||||||
|  |         :param session: Backend state integrity session | ||||||
|  |         :type session: varies | ||||||
|  |         """ | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def upcoming(self, chain_spec, session=None): | ||||||
|  |         """Get queued transactions ready to be sent to the network. | ||||||
|  | 
 | ||||||
|  |         The transactions will be a combination of newly submitted transactions, previously sent but stalled transactions, and transactions that could temporarily not be submitted. | ||||||
|  | 
 | ||||||
|  |         :param chain_spec: Chain spec to use to identify the transaction network | ||||||
|  |         :type chain_spec: chainlib.chain.ChainSpec | ||||||
|  |         :param session: Backend state integrity session | ||||||
|  |         :type session: varies | ||||||
|  |         """ | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def create_session(self, session=None): | ||||||
|  |         """Create a session context to guarantee atomic state change in backend. | ||||||
|  | 
 | ||||||
|  |         :param session: If specified, session will be used instead of creating a new one | ||||||
|  |         :type session: varies | ||||||
|  |         """ | ||||||
|  |         return self.backend.create_session(session) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def release_session(self, session=None): | ||||||
|  |         """Release a session context created by create_session. | ||||||
|  | 
 | ||||||
|  |         If session parameter is defined, final session destruction will be deferred to the initial provider of the session. In other words; if create_session was called with a session, release_session should symmetrically be called with the same session. | ||||||
|  | 
 | ||||||
|  |         :param session: Session context. | ||||||
|  |         :type session: varies | ||||||
|  |         """ | ||||||
|  |         return self.backend.release_session(session) | ||||||
							
								
								
									
										1
									
								
								chainqueue/cache/__init__.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								chainqueue/cache/__init__.py
									
									
									
									
										vendored
									
									
								
							| @ -1 +0,0 @@ | |||||||
| from .base import * |  | ||||||
							
								
								
									
										141
									
								
								chainqueue/cache/base.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										141
									
								
								chainqueue/cache/base.py
									
									
									
									
										vendored
									
									
								
							| @ -1,141 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import enum |  | ||||||
| import logging |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
class NoopNormalizer:
    """Normalizer that passes every value through unchanged.

    Used as the default wherever address, hash or value normalization
    is optional.
    """

    def __init__(self):
        # All three normalization hooks resolve to the same identity function.
        self.address = self.noop
        self.hash = self.noop
        self.value = self.noop


    def noop(self, v):
        """Return v unchanged."""
        return v


# Shared default normalizer instance.
noop_normalizer = NoopNormalizer()
| 
 |  | ||||||
| 
 |  | ||||||
class CacheTx:
    """Normalized, backend-agnostic representation of a queued transaction.

    :param chain_spec: Chain spec the transaction belongs to
    :type chain_spec: chainlib.chain.ChainSpec
    :param normalizer: Normalizer applied to addresses, hashes and values
    """

    def __init__(self, chain_spec, normalizer=noop_normalizer):
        self.chain_spec = chain_spec
        self.normalizer = normalizer

        # Core transaction fields, populated by init().
        self.hash = None
        self.nonce = None
        self.sender = None
        self.recipient = None
        self.value = None

        # Confirmation metadata, populated by confirm().
        self.block_number = None
        self.tx_index = None
        self.timestamp = None

        # Raw source representation, if any.
        self.src = None


    def confirm(self, block_number, tx_index, timestamp):
        """Record block inclusion details for the transaction."""
        self.block_number = block_number
        self.tx_index = tx_index
        self.timestamp = timestamp


    def init(self, tx_hash, nonce, sender, recipient, value):
        """Populate the core transaction fields, applying normalization."""
        self.hash = self.normalizer.hash(tx_hash)
        self.sender = self.normalizer.address(sender)
        self.recipient = self.normalizer.address(recipient)
        self.nonce = nonce
        self.value = self.normalizer.value(value)


    def deserialize(self, signed_tx):
        """Parse a signed transaction; must be implemented by a subclass."""
        raise NotImplementedError()


    def set(self, k, v):
        """Store an auxiliary value under the attribute name 'v_' + k."""
        setattr(self, 'v_' + k, v)


    def __str__(self):
        return '{}: {} ({}) -> {} = {}'.format(self.hash, self.sender, self.nonce, self.recipient, self.value)
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
class CacheTokenTx(CacheTx):
    """Cache transaction carrying token transfer details.

    Source and destination token/value slots are initialized empty and may
    later be filled via set().
    """

    def __init__(self, chain_spec, normalizer=noop_normalizer):
        super(CacheTokenTx, self).__init__(chain_spec, normalizer=normalizer)
        for attr in ('v_src_token', 'v_src_value', 'v_dst_token', 'v_dst_value'):
            setattr(self, attr, None)
| 
 |  | ||||||
| 
 |  | ||||||
class CacheSort(enum.Enum):
    """Sort orders available for cache queries."""
    DATE = 1
    NONCE = 2
| 
 |  | ||||||
| 
 |  | ||||||
class CacheFilter:
    """Criteria container for narrowing cache queries.

    :param normalizer: Normalizer applied to added addresses
    :param nonce: Match only this nonce
    :param before: Upper time bound
    :param after: Lower time bound
    :param sort: Sort order for results
    :param reverse: Reverse the sort order
    """

    def __init__(self, normalizer=noop_normalizer, nonce=None, before=None, after=None, sort=CacheSort.DATE, reverse=False):
        self.normalizer = normalizer
        self.nonce = nonce
        self.before = before
        self.after = after
        self.sort = sort
        self.reverse = reverse
        # Address lists stay None until the first add_* call.
        self.senders = None
        self.recipients = None


    def add_senders(self, senders):
        """Add one or more sender addresses to the filter.

        :param senders: Single address or list of addresses
        """
        if self.senders is None:
            self.senders = []
        if isinstance(senders, str):
            senders = [senders]
        for addr in senders:
            if self.normalizer is not None:
                addr = self.normalizer.address(addr)
            self.senders.append(addr)


    def add_recipients(self, recipients):
        """Add one or more recipient addresses to the filter.

        :param recipients: Single address or list of addresses
        """
        if self.recipients is None:
            self.recipients = []
        if isinstance(recipients, str):
            recipients = [recipients]
        for addr in recipients:
            if self.normalizer is not None:
                addr = self.normalizer.address(addr)
            self.recipients.append(addr)
| 
 |  | ||||||
| 
 |  | ||||||
class Cache:
    """Abstract interface for a transaction cache backend.

    Subclasses must implement every method; the base raises
    NotImplementedError throughout.
    """

    def put(self, chain_spec, cache_tx):
        """Store a transaction in the cache."""
        raise NotImplementedError()


    def get(self, chain_spec, tx_hash):
        """Retrieve a cached transaction by hash."""
        raise NotImplementedError()


    def by_nonce(self, cache_filter):
        """List transactions matching the filter, ordered by nonce."""
        raise NotImplementedError()


    def by_date(self, cache_filter=None):
        """List transactions matching the filter, ordered by date."""
        raise NotImplementedError()


    def count(self, cache_filter=None):
        """Count transactions matching the filter."""
        raise NotImplementedError()


    def set_block(self, block, tx):
        """Record block inclusion information for a transaction."""
        raise NotImplementedError()
							
								
								
									
										10
									
								
								chainqueue/cache/fs.py
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								chainqueue/cache/fs.py
									
									
									
									
										vendored
									
									
								
							| @ -1,10 +0,0 @@ | |||||||
| # local imports |  | ||||||
| from .base import Cache |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
class FsCache(Cache):
    """Filesystem-backed cache stub.

    Only records the storage path; query methods are inherited
    (unimplemented) from Cache.

    :param path: Root directory for cache data
    """

    def __init__(self, path):
        # Root directory where cache data will live.
        self.path = path
 |  | ||||||
| @ -1,8 +0,0 @@ | |||||||
def apply_flag(flag):
    """Register the 'queue' flag group on the given flag collection.

    :param flag: Flag collection supporting add()
    :returns: The same flag collection, for chaining
    """
    flag.add('queue')
    return flag
| 
 |  | ||||||
| 
 |  | ||||||
def apply_arg(arg):
    """Register queue-related command line arguments.

    :param arg: Argument collection supporting add_long()
    :returns: The same argument collection, for chaining
    """
    arg.add_long('tx-digest-size', 'queue', type=int, help='Size of transaction hash in bytes')
    return arg
| @ -1,10 +0,0 @@ | |||||||
def process_config(config, args, flags):
    """Merge queue-related command line arguments into the configuration.

    :param config: Configuration object supporting dict_override()
    :param args: Parsed command line arguments
    :param flags: Argument flags (unused here)
    :returns: The updated configuration object
    """
    overrides = {
        'QUEUE_BACKEND': getattr(args, 'backend'),
        'TX_DIGEST_SIZE': getattr(args, 'tx_digest_size'),
        'QUEUE_STATE_PATH': getattr(args, 'state_dir'),
    }
    config.dict_override(overrides, 'local cli args')
    return config
| @ -1,8 +1,9 @@ | |||||||
| [queue] | [database] | ||||||
| backend = mem | name = | ||||||
| state_path = | engine = | ||||||
| index_path = | driver = | ||||||
| counter_path = | host = | ||||||
| 
 | port = | ||||||
| [tx] | user = | ||||||
| digest_size = 32 | password = | ||||||
|  | debug = 0 | ||||||
|  | |||||||
| @ -1,151 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import logging |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from hexathon import ( |  | ||||||
|         add_0x, |  | ||||||
|         strip_0x, |  | ||||||
|         uniform, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue.cache import CacheTx |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def normalize_hex(k):
    """Return the hex string stripped of its 0x prefix, in uniform case."""
    return uniform(strip_0x(k))
| 
 |  | ||||||
| 
 |  | ||||||
class QueueEntry:
    """A single transaction entry in the queue, bound to a backing store.

    State transition methods delegate to the store, which defines the status
    bit constants (QUEUED, RESERVED, IN_NETWORK, ...) referenced here.

    :param store: Queue store backend
    :param tx_hash: Transaction hash identifying this entry
    :param cache_adapter: Adapter class used to deserialize transactions for caching
    """

    def __init__(self, store, tx_hash=None, cache_adapter=CacheTx):
        self.store = store
        self.tx_hash = tx_hash
        self.cache_adapter = cache_adapter
        self.signed_tx = None
        self.seq = None
        self.k = None
        self.synced = False


    def serialize(self):
        """Return the signed transaction payload for this entry."""
        return self.signed_tx


    def create(self, signed_tx):
        """Insert a new transaction into the store.

        :param signed_tx: Signed transaction as a hex string
        :returns: Transaction hash assigned by the store
        """
        normalized = normalize_hex(signed_tx)
        (key, tx_hash) = self.store.put(normalized, cache_adapter=self.cache_adapter)
        self.k = key
        self.synced = True
        return tx_hash


    def local_state(self):
        """Return the current queue state as a (numeric, name) tuple."""
        state = self.store.state(self.k)
        return (state, self.store.name(state),)


    def load(self):
        """Populate this entry from the store by transaction hash."""
        (self.k, self.signed_tx) = self.store.get(self.tx_hash)
        self.synced = True


    def __match_state(self, state):
        # True when any of the given status bits are set on this entry.
        return bool(self.store.state(self.k) & state)


    def waitforfunds(self):
        """Move the entry to the insufficient funds state (idempotent)."""
        if self.__match_state(self.store.INSUFFICIENT_FUNDS):
            return
        self.store.move(self.k, self.store.INSUFFICIENT_FUNDS)


    def fubar(self):
        """Flag the entry with an unknown error (idempotent)."""
        if self.__match_state(self.store.UNKNOWN_ERROR):
            return
        self.store.set(self.k, self.store.UNKNOWN_ERROR)


    def reject(self):
        """Flag the entry with a node error (idempotent)."""
        if self.__match_state(self.store.NODE_ERROR):
            return
        self.store.set(self.k, self.store.NODE_ERROR)


    def override(self, manual=False):
        """Mark the entry obsolete, optionally as a manual intervention."""
        flags = self.store.OBSOLETE
        if manual:
            flags |= self.store.MANUAL
        self.store.set(self.k, flags)


    def manual(self):
        """Flag the entry as manually controlled."""
        self.store.set(self.k, self.store.MANUAL)


    def retry(self):
        """Re-queue a previously deferred entry (idempotent)."""
        if self.__match_state(self.store.QUEUED):
            return
        self.store.change(self.k, self.store.QUEUED, self.store.INSUFFICIENT_FUNDS)


    def readysend(self):
        """Queue the entry for sending (idempotent)."""
        if self.__match_state(self.store.QUEUED):
            return
        self.store.change(self.k, self.store.QUEUED, self.store.INSUFFICIENT_FUNDS)


    def sent(self):
        """Mark the entry as having reached the network (idempotent)."""
        if self.__match_state(self.store.IN_NETWORK):
            return
        self.store.change(self.k, self.store.IN_NETWORK, self.store.RESERVED | self.store.DEFERRED | self.store.QUEUED | self.store.LOCAL_ERROR | self.store.NODE_ERROR)


    def sendfail(self):
        """Record a local send failure and defer the entry (idempotent)."""
        if self.__match_state(self.store.NODE_ERROR):
            return
        self.store.change(self.k, self.store.LOCAL_ERROR | self.store.DEFERRED, self.store.RESERVED | self.store.QUEUED | self.store.INSUFFICIENT_FUNDS)


    def reserve(self):
        """Reserve the entry for exclusive processing (idempotent)."""
        if self.__match_state(self.store.RESERVED):
            return
        self.store.change(self.k, self.store.RESERVED, self.store.QUEUED)


    def fail(self, block, tx):
        """Record a network error on the entry, keeping its other state bits.

        :param block: Block in which the failure was observed
        :param tx: Transaction object passed to the cache
        """
        if self.__match_state(self.store.NETWORK_ERROR):
            return
        current = self.store.state(self.k)
        self.store.change(self.k, current | self.store.NETWORK_ERROR, self.store.QUEUED)
        if self.store.cache:
            self.store.cache.set_block(self.tx_hash, block, tx)


    def cancel(self, confirmed=False):
        """Obsolete the entry, additionally finalizing it when confirmed."""
        add = self.store.OBSOLETE
        if confirmed:
            add |= self.store.FINAL
        self.store.change(self.k, add, self.store.RESERVED | self.store.QUEUED)


    def succeed(self, block, tx):
        """Finalize the entry after successful network confirmation."""
        self.store.set(self.k, self.store.FINAL)
        if self.store.cache:
            self.store.cache.set_block(self.tx_hash, block, tx)


    def test(self, state):
        """Return True when any bits of the given state are set on the entry."""
        return self.__match_state(state)


    def __str__(self):
        record = self.store.get(self.tx_hash)
        numeric = self.store.state(record[0])
        label = self.store.name(numeric)
        return '{}: {} ({})'.format(self.k, label, numeric)
| @ -42,6 +42,7 @@ class StatusEnum(enum.IntEnum): | |||||||
|     """ |     """ | ||||||
|     PENDING = 0 |     PENDING = 0 | ||||||
|     """Transaction has been added but no processing has been performed""" |     """Transaction has been added but no processing has been performed""" | ||||||
|  | 
 | ||||||
|     SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR |     SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR | ||||||
|     """Temporary error occurred when sending transaction to node""" |     """Temporary error occurred when sending transaction to node""" | ||||||
|     RETRY = StatusBits.QUEUED | StatusBits.DEFERRED  |     RETRY = StatusBits.QUEUED | StatusBits.DEFERRED  | ||||||
|  | |||||||
| @ -24,7 +24,8 @@ class CacheIntegrityError(ChainQueueException): | |||||||
|     pass |     pass | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class DuplicateTxError(ChainQueueException): | class BackendIntegrityError(ChainQueueException): | ||||||
|     """Backend already knows transaction |     """Raised when queue backend has invalid state | ||||||
|     """ |     """ | ||||||
|     pass |     pass | ||||||
|  | 
 | ||||||
|  | |||||||
| @ -4,19 +4,15 @@ | |||||||
| import os | import os | ||||||
| import logging | import logging | ||||||
| import sys | import sys | ||||||
| import importlib |  | ||||||
| 
 | 
 | ||||||
| # external imports | # external imports | ||||||
| from hexathon import add_0x | from hexathon import add_0x | ||||||
| import chainlib.cli | import chainlib.cli | ||||||
| from chainlib.chain import ChainSpec | from chainlib.chain import ChainSpec | ||||||
|  | from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer | ||||||
| 
 | 
 | ||||||
| # local imports | # local imports | ||||||
| import chainqueue.cli | from chainqueue.cli import Outputter | ||||||
| #from chainqueue.cli.output import Outputter |  | ||||||
| from chainqueue.settings import ChainqueueSettings |  | ||||||
| from chainqueue.store import Store |  | ||||||
| from chainqueue.entry import QueueEntry |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| logging.basicConfig(level=logging.WARNING) | logging.basicConfig(level=logging.WARNING) | ||||||
| @ -25,31 +21,30 @@ logg = logging.getLogger() | |||||||
| script_dir = os.path.dirname(os.path.realpath(__file__))  | script_dir = os.path.dirname(os.path.realpath(__file__))  | ||||||
| config_dir = os.path.join(script_dir, '..', 'data', 'config') | config_dir = os.path.join(script_dir, '..', 'data', 'config') | ||||||
| 
 | 
 | ||||||
| arg_flags = chainlib.cli.argflag_std_base | chainlib.cli.Flag.CHAIN_SPEC | chainlib.cli.Flag.UNSAFE | arg_flags = chainlib.cli.argflag_std_base | chainlib.cli.Flag.CHAIN_SPEC | ||||||
| argparser = chainlib.cli.ArgumentParser(arg_flags) | argparser = chainlib.cli.ArgumentParser(arg_flags) | ||||||
| argparser.add_argument('--backend', type=str, default='sql', help='Backend to use') | argparser.add_argument('--backend', type=str, default='sql', help='Backend to use (currently only "sql")') | ||||||
|  | argparser.add_argument('--start', type=str, help='Oldest transaction hash to include in results') | ||||||
|  | argparser.add_argument('--end', type=str, help='Newest transaction hash to include in results') | ||||||
| argparser.add_argument('--error', action='store_true', help='Only show transactions which have error state') | argparser.add_argument('--error', action='store_true', help='Only show transactions which have error state') | ||||||
| argparser.add_argument('--no-final', action='store_true', dest='no_final', help='Omit finalized transactions') | argparser.add_argument('--pending', action='store_true', help='Omit finalized transactions') | ||||||
| argparser.add_argument('--status-mask', type=str, dest='status_mask', action='append', default=[], help='Manually specify status bitmask value to match (overrides --error and --pending)') | argparser.add_argument('--status-mask', type=int, dest='status_mask', help='Manually specify status bitmask value to match (overrides --error and --pending)') | ||||||
| argparser.add_argument('--exact', action='store_true', help='Match status exact') | argparser.add_argument('--summary', action='store_true', help='output summary for each status category') | ||||||
| argparser.add_argument('--include-pending', action='store_true', dest='include_pending', help='Include transactions in unprocessed state (pending)') | argparser.add_argument('-o', '--column', dest='column', action='append', type=str, help='add a column to display') | ||||||
| argparser.add_argument('--renderer', type=str, default=[], action='append', help='Transaction renderer for output') | argparser.add_positional('address', type=str, help='Ethereum address of recipient') | ||||||
| argparser.add_positional('address', required=False, type=str, help='Ethereum address of recipient') |  | ||||||
| args = argparser.parse_args() | args = argparser.parse_args() | ||||||
| extra_args = { | extra_args = { | ||||||
|     'address': None, |     'address': None, | ||||||
|     'backend': None, |     'backend': None, | ||||||
|     'state_dir': None, |     'start': None, | ||||||
|     'exact': None, |     'end': None, | ||||||
|     'error': None, |     'error': None, | ||||||
|     'include_pending': '_PENDING', |     'pending': None, | ||||||
|     'status_mask': None, |     'status_mask': None, | ||||||
|     'no_final': None, |     'column': None, | ||||||
|     'renderer': None, |     'summary': None, | ||||||
|         } |         } | ||||||
| config = chainlib.cli.Config.from_args(args, arg_flags, extra_args=extra_args, base_config_dir=config_dir) | config = chainlib.cli.Config.from_args(args, arg_flags, extra_args=extra_args, base_config_dir=config_dir) | ||||||
| config = chainqueue.cli.config.process_config(config, args, 0) |  | ||||||
| logg.debug('config loaded:\n{}'.format(config)) |  | ||||||
| 
 | 
 | ||||||
| chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC')) | chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC')) | ||||||
| 
 | 
 | ||||||
| @ -63,40 +58,36 @@ if status_mask == None: | |||||||
| 
 | 
 | ||||||
| tx_getter = None | tx_getter = None | ||||||
| tx_lister = None | tx_lister = None | ||||||
| #output_cols = config.get('_COLUMN') | session_method = None | ||||||
|  | if config.get('_BACKEND') == 'sql': | ||||||
|  |     from chainqueue.sql.query import get_account_tx as tx_lister | ||||||
|  |     from chainqueue.sql.query import get_tx_cache as tx_getter | ||||||
|  |     from chainqueue.runnable.sql import setup_backend | ||||||
|  |     from chainqueue.db.models.base import SessionBase | ||||||
|  |     setup_backend(config, debug=config.true('DATABASE_DEBUG')) | ||||||
|  |     session_method = SessionBase.create_session | ||||||
|  | else: | ||||||
|  |     raise NotImplementedError('backend {} not implemented'.format(config.get('_BACKEND'))) | ||||||
| 
 | 
 | ||||||
| renderers_mods = [] | output_cols = config.get('_COLUMN') | ||||||
| for renderer in config.get('_RENDERER'): |  | ||||||
|     m = importlib.import_module(renderer) |  | ||||||
|     renderers_mods.append(m) |  | ||||||
|     logg.info('using renderer module {}'.format(renderer)) |  | ||||||
| 
 |  | ||||||
| settings = ChainqueueSettings() |  | ||||||
| settings.process(config) |  | ||||||
| logg.debug('settings:\n{}'.format(settings)) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def main(): | def main(): | ||||||
| #    since = config.get('_START', None) |     since = config.get('_START', None) | ||||||
| #    if since != None: |     if since != None: | ||||||
| #        since = add_0x(since) |         since = add_0x(since) | ||||||
| #    until = config.get('_END', None) |     until = config.get('_END', None) | ||||||
| #    if until != None: |     if until != None: | ||||||
| #        until = add_0x(until) |         until = add_0x(until) | ||||||
| #    txs = tx_lister(chain_spec, config.get('_ADDRESS'), since=since, until=until, status=status_mask, not_status=not_status_mask) |     txs = tx_lister(chain_spec, config.get('_ADDRESS'), since=since, until=until, status=status_mask, not_status=not_status_mask) | ||||||
|     txs = settings.get('QUEUE_STORE').by_state(state=settings.get('QUEUE_STATUS_FILTER'), strict=config.get('_EXACT'), include_pending=config.get('_PENDING')) |     outputter = Outputter(chain_spec, sys.stdout, tx_getter, session_method=session_method, decode_status=config.true('_RAW'), cols=output_cols) | ||||||
|      |     if config.get('_SUMMARY'): | ||||||
|     for i, tx_hash in enumerate(txs): |         for k in txs.keys(): | ||||||
|         entry = QueueEntry(settings.get('QUEUE_STORE'), tx_hash) |             outputter.add(k) | ||||||
|         entry.load() |         outputter.decode_summary() | ||||||
|         v = None |  | ||||||
|         if len(renderers_mods) == 0: |  | ||||||
|             v = str(entry) |  | ||||||
|     else: |     else: | ||||||
|             for m in renderers_mods: |         for k in txs.keys(): | ||||||
|                 v = m.apply(i, settings, v, settings.get('CHAIN_SPEC'), entry) |             outputter.decode_single(k) | ||||||
|         print(v) |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     main() |     main() | ||||||
|  | |||||||
							
								
								
									
										14
									
								
								chainqueue/runnable/sql.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								chainqueue/runnable/sql.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,14 @@ | |||||||
|  | # standard imports | ||||||
|  | import logging | ||||||
|  | 
 | ||||||
|  | # local imports | ||||||
|  | from chainqueue.db.models.base import SessionBase | ||||||
|  | from chainqueue.db import dsn_from_config | ||||||
|  | 
 | ||||||
|  | logg = logging.getLogger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
def setup_backend(config, debug=False):
    """Connect the SQL session backend using database settings from config.

    :param config: Configuration providing database connection settings
    :param debug: Enable backend debug output
    """
    dsn = dsn_from_config(config)
    logg.debug('dsn {}'.format(dsn))
    SessionBase.connect(dsn, debug=debug)
| @ -1,51 +0,0 @@ | |||||||
| # SPDX-License-Identifier: GPL-3.0-or-later |  | ||||||
| 
 |  | ||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import logging |  | ||||||
| import argparse |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue.state import Status |  | ||||||
| 
 |  | ||||||
| argparser = argparse.ArgumentParser() |  | ||||||
| argparser.add_argument('-r', '--raw', dest='r', action='store_true', help='Always print pure state element strings') |  | ||||||
| argparser.add_argument('state', type=str, help='State to interpret') |  | ||||||
| args = argparser.parse_args() |  | ||||||
| 
 |  | ||||||
| status_interpreter = Status(None, allow_invalid=True) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def handle_numeric(v, elements=False):
    """Resolve a numeric state to its name, or to its elements when composite.

    :param v: Numeric state value
    :param elements: When set, expand non-pure states into their element names
    :returns: State name, or element expansion for composite states
    """
    if elements and not status_interpreter.is_pure(v):
        return status_interpreter.elements(v)
    return status_interpreter.name(v)
| 
 |  | ||||||
| 
 |  | ||||||
def handle_string(v):
    """Resolve a state name, or a composite element string, to its numeric value.

    :param v: State name or element list string
    :returns: Numeric state value
    """
    try:
        return status_interpreter.from_name(v)
    except AttributeError:
        # Not a single pure name; try parsing it as a composite element string.
        return status_interpreter.from_elements(v)
| 
 |  | ||||||
| 
 |  | ||||||
def main():
    """Interpret the state cli argument as numeric or symbolic and print the result."""
    v = None
    numeric = False
    try:
        v = int(args.state)
        numeric = True
    # Narrowed from a bare except: int() on the argparse-supplied string can
    # only raise ValueError here; a bare except would also swallow
    # KeyboardInterrupt/SystemExit.
    except ValueError:
        v = args.state

    r = None
    if numeric:
        r = handle_numeric(v, elements=args.r)
    else:
        r = handle_string(v)

    print(r)


if __name__ == '__main__':
    main()
| @ -1,99 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import logging |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from chainlib.settings import ChainSettings |  | ||||||
| from chainqueue.state import Status |  | ||||||
| from chainqueue.store import Store |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def process_queue_tx(settings, config):
    """Copy the transaction digest size from config into settings.

    :param settings: Settings object supporting set()
    :param config: Configuration object supporting get()
    :returns: The updated settings object
    """
    settings.set('TX_DIGEST_SIZE', config.get('TX_DIGEST_SIZE'))
    return settings
| 
 |  | ||||||
| 
 |  | ||||||
def process_queue_store(settings, config):
    """Instantiate the queue state store and queue store from settings values.

    :param settings: Settings object supporting get()/set()
    :param config: Configuration object (unused here)
    :returns: The updated settings object
    """
    state_store = Status(settings.get('QUEUE_STORE_FACTORY'), allow_invalid=True)
    settings.set('QUEUE_STATE_STORE', state_store)
    queue_store = Store(
        settings.get('CHAIN_SPEC'),
        settings.get('QUEUE_STATE_STORE'),
        settings.get('QUEUE_INDEX_STORE'),
        settings.get('QUEUE_COUNTER_STORE'),
        sync=True,
        )
    settings.set('QUEUE_STORE', queue_store)
    return settings
| 
 |  | ||||||
| 
 |  | ||||||
def process_queue_paths(settings, config):
    """Derive queue index and counter paths, defaulting to locations under STATE_PATH.

    :param settings: Settings object supporting set()
    :param config: Configuration object supporting get()
    :returns: The updated settings object
    """
    index_dir = config.get('QUEUE_INDEX_PATH')
    if index_dir is None:
        # Default index location: a 'tx' subdirectory of the state path.
        index_dir = os.path.join(config.get('STATE_PATH'), 'tx')

    counter_dir = config.get('QUEUE_COUNTER_PATH')
    if counter_dir is None:
        # Default counter location: the state path itself.
        counter_dir = os.path.join(config.get('STATE_PATH'))

    settings.set('QUEUE_STATE_PATH', config.get('STATE_PATH'))
    settings.set('QUEUE_INDEX_PATH', index_dir)
    settings.set('QUEUE_COUNTER_PATH', counter_dir)
    return settings
| 
 |  | ||||||
| 
 |  | ||||||
def process_queue_backend_fs(settings, config):
    """Construct filesystem-backed queue stores and register them in settings.

    :param settings: Settings object supporting set(), with raw values in .o
    :param config: Configuration object (unused here)
    :returns: The updated settings object
    """
    # Imported locally so fs backend dependencies are only needed when selected.
    from chainqueue.store.fs import IndexStore
    from chainqueue.store.fs import CounterStore
    from shep.store.file import SimpleFileStoreFactory

    digest_bytes = int(settings.o['TX_DIGEST_SIZE'])
    index_store = IndexStore(settings.o['QUEUE_INDEX_PATH'], digest_bytes=digest_bytes)
    counter_store = CounterStore(settings.o['QUEUE_COUNTER_PATH'])
    factory = SimpleFileStoreFactory(settings.o['QUEUE_STATE_PATH'], use_lock=True).add

    settings.set('QUEUE_INDEX_STORE', index_store)
    settings.set('QUEUE_COUNTER_STORE', counter_store)
    settings.set('QUEUE_STORE_FACTORY', factory)

    return settings
| 
 |  | ||||||
| 
 |  | ||||||
def process_queue_status_filter(settings, config):
    """Build the numeric status filter mask from cli arguments.

    With no explicit masks given, all states known to the state store are
    included. Mask arguments may be numeric strings or state names.

    :param settings: Settings object supporting get()/set()
    :param config: Configuration object supporting get()
    :returns: The updated settings object
    """
    store = settings.get('QUEUE_STATE_STORE')
    masks = config.get('_STATUS_MASK')
    states = 0
    if len(masks) == 0:
        # No explicit filter: match every state known to the store.
        for v in store.all(numeric=True):
            states |= v
        logg.debug('state store {}'.format(states))
    else:
        for v in masks:
            try:
                states |= int(v)
                continue
            except ValueError:
                # Not numeric; fall through to name resolution.
                pass
            state = store.from_name(v)
            logg.debug('resolved state argument {} to numeric state {}'.format(v, state))
            states |= state

    settings.set('QUEUE_STATUS_FILTER', states)
    return settings
| 
 |  | ||||||
| 
 |  | ||||||
def process_queue(settings, config):
    """Run all queue-related settings processing steps in order.

    :param settings: Settings object
    :param config: Configuration object
    :returns: The updated settings object
    """
    settings = process_queue_tx(settings, config)
    settings = process_queue_paths(settings, config)
    if config.get('QUEUE_BACKEND') == 'fs':
        settings = process_queue_backend_fs(settings, config)
    # NOTE(review): process_queue_backend is not defined in this module's
    # visible scope -- confirm it is provided elsewhere.
    settings = process_queue_backend(settings, config)
    settings = process_queue_store(settings, config)
    settings = process_queue_status_filter(settings, config)
    return settings
| 
 |  | ||||||
| 
 |  | ||||||
def process_settings(settings, config):
    """Top-level chainqueue settings processing entry point.

    Runs the base chainlib settings processing, then the queue-specific steps.

    :param settings: Settings object
    :param config: Configuration object
    :returns: The updated settings object
    """
    super(ChainqueueSettings, settings).process(config)
    settings = settings.process_queue(settings, config)
    return settings
| @ -462,70 +462,6 @@ def get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, count | |||||||
| 
 | 
 | ||||||
|     return txs |     return txs | ||||||
| 
 | 
 | ||||||
def get_latest_txs(chain_spec, count=10, since=None, until=None, status=None, not_status=None, status_target=None, session=None):
    """Return the latest local queue transactions.

    The effect of the since and until parameters depends on their type.
    Results are returned inclusive of the given parameter conditions.

    :param chain_spec: Chain spec for transaction network
    :type chain_spec: chainlib.chain.ChainSpec
    :param count: Maximum number of transactions to return
    :type count: int
    :param since: Lower bound of the result range (id or date, inclusive)
    :type since: varies
    :param until: Upper bound of the result range (id or date, inclusive)
    :type until: varies
    :param status: Only include transactions where the given status bits are set
    :type status: chainqueue.enum.StatusEnum
    :param not_status: Only include transactions where the given status bits are not set
    :type not_status: chainqueue.enum.StatusEnum
    :param status_target: Only include transactions where the status argument is an exact match
    :type status_target: chainqueue.enum.StatusEnum
    :param session: Backend state integrity session
    :type session: varies
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    txs = {}

    session = SessionBase.bind_session(session)

    try:
        filter_offset = sql_range_filter(session, criteria=since)
        filter_limit = sql_range_filter(session, criteria=until)
    except NotLocalTxError as e:
        logg.error('query build failed: {}'.format(e))
        # fix: the session acquired above was previously leaked on this path
        SessionBase.release_session(session)
        return {}

    q = session.query(Otx)
    q = q.join(TxCache)

    if filter_offset is not None:
        if filter_offset[0] == 'id':
            q = q.filter(Otx.id>=filter_offset[1])
        elif filter_offset[0] == 'date':
            q = q.filter(Otx.date_created>=filter_offset[1])

    if filter_limit is not None:
        if filter_limit[0] == 'id':
            q = q.filter(Otx.id<=filter_limit[1])
        elif filter_limit[0] == 'date':
            q = q.filter(Otx.date_created<=filter_limit[1])

    if status is not None:
        # a bare status filter means "exact match on status"
        if status_target is None:
            status_target = status
        q = q.filter(Otx.status.op('&')(status)==status_target)

    if not_status is not None:
        q = q.filter(Otx.status.op('&')(not_status)==0)

    q = q.order_by(Otx.date_created.desc(), Otx.nonce.desc()).limit(count)
    results = q.all()
    for r in results:
        if r.tx_hash in txs:
            logg.debug('tx {} already recorded'.format(r.tx_hash))
            continue
        txs[r.tx_hash] = r.signed_tx

    SessionBase.release_session(session)

    return txs
| 
 | 
 | ||||||
| def count_tx(chain_spec, sender=None, status=None, status_target=None, session=None): | def count_tx(chain_spec, sender=None, status=None, status_target=None, session=None): | ||||||
|     """Count transaction records matching the given criteria. |     """Count transaction records matching the given criteria. | ||||||
|  | |||||||
| @ -1,140 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import logging |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| import shep.persist |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
class Verify:
    """Collection of state transition verifiers for the queue state store.

    Each method named after a destination state decides whether a transition
    from a given state into that state is permitted, returning None when it
    is allowed and a reason string when it is not.
    """

    def verify(self, state_store, key, from_state, to_state):
        """Dispatch to the verifier method matching the destination state name.

        :param state_store: State store defining state bits, masks and names
        :param key: Queue item key (not used by the verifiers themselves)
        :param from_state: State being transitioned from
        :param to_state: State being transitioned to
        :returns: None if the transition is allowed, a reason string if not
        """
        to_state_name = state_store.name(to_state)
        m = None
        try:
            m = getattr(self, to_state_name)
        except AttributeError:
            # no verifier defined for this destination state; allow
            return None

        r = m(state_store, from_state)
        if r is not None:
            from_state_name = state_store.name(from_state)
            r = '{} -> {}: {}'.format(from_state_name, to_state_name, r)

        return r


    def INSUFFICIENT_FUNDS(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'


    def UNKNOWN_ERROR(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        # fix: the RESERVED bit must be SET for an unknown error to apply;
        # the original tested the inverted condition (compare NODE_ERROR below)
        if not from_state & state_store.RESERVED:
            return 'not reserved'
        if from_state & state_store.mask_error:
            return 'already in error state'


    def NODE_ERROR(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'
        if not from_state & state_store.RESERVED:
            return 'not reserved'
        if from_state & state_store.mask_error:
            return 'already in error state'


    def NETWORK_ERROR(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'


    def OBSOLETE(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'
        if from_state & state_store.OBSOLETE:
            return 'already obsolete'


    def MANUAL(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'


    def QUEUED(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            # a tx already in the network may only be re-queued after an error
            if not from_state & state_store.mask_error:
                return 'not in error state'
        elif from_state & state_store.mask_error:
            return 'no first send on error state'


    def SENDFAIL(self, state_store, from_state):
        # a send failure follows the same rules as a node error
        return self.NODE_ERROR(state_store, from_state)


    def FINAL(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'


    def _MINEFAIL(self, state_store, from_state):
        # a mining failure follows the same rules as a network error
        return self.NETWORK_ERROR(state_store, from_state)


    def _CANCEL(self, state_store, from_state):
        if from_state:
            if from_state & state_store.FINAL:
                return 'already finalized'
            if not from_state & (state_store.OBSOLETE | state_store.IN_NETWORK):
                return 'can only cancel state having OBSOLETE and/or IN_NETWORK'
| 
 |  | ||||||
| 
 |  | ||||||
class Status(shep.persist.PersistedState):
    """Queue transaction state definition on top of a shep persisted state.

    Declares the atomic status bits and the composite aliases used by the
    queue, and wires in the Verify transition verifier.
    """

    # number of atomic state bits reserved in the underlying state store
    bits = 12

    def __init__(self, store_factory, allow_invalid=False, event_callback=None):
        # transition checking is delegated to the Verify collection
        verify = Verify().verify
        # NOTE(review): called before the parent constructor runs --
        # presumably supported by PersistedState; confirm against shep docs
        self.set_default_state('PENDING')
        super(Status, self).__init__(store_factory, self.bits, verifier=verify, check_alias=not allow_invalid, event_callback=event_callback)
        # atomic status bits
        self.add('QUEUED')
        self.add('RESERVED')
        self.add('IN_NETWORK')
        self.add('DEFERRED')
        self.add('INSUFFICIENT_FUNDS')
        self.add('LOCAL_ERROR')
        self.add('NODE_ERROR')
        self.add('NETWORK_ERROR')
        self.add('UNKNOWN_ERROR')
        self.add('FINAL')
        self.add('OBSOLETE')
        self.add('MANUAL')

        # composite aliases over the atomic bits above
        self.alias('SENDFAIL', self.DEFERRED | self.LOCAL_ERROR)
        self.alias('RETRY', self.DEFERRED | self.QUEUED)
        self.alias('OBSOLETED', self.OBSOLETE | self.IN_NETWORK)
        self.alias('FUBAR', self.FINAL | self.UNKNOWN_ERROR)
        self.alias('CANCELLED', self.IN_NETWORK | self.FINAL | self.OBSOLETE)
        self.alias('OVERRIDDEN', self.FINAL | self.OBSOLETE | self.MANUAL)
        self.alias('REJECTED', self.NODE_ERROR | self.FINAL)
        self.alias('REVERTED', self.IN_NETWORK | self.FINAL | self.NETWORK_ERROR)
        self.alias('SUCCESS', self.IN_NETWORK | self.FINAL)
        self.alias('_MINEFAIL', self.FINAL | self.NETWORK_ERROR)
        self.alias('_CANCEL', self.FINAL | self.OBSOLETE)

        # mask covering every error bit, used by the transition verifiers
        self.mask_error = self.LOCAL_ERROR | self.NODE_ERROR | self.NETWORK_ERROR | self.UNKNOWN_ERROR
| @ -1,5 +0,0 @@ | |||||||
| from .base import ( |  | ||||||
|         to_key, |  | ||||||
|         from_key, |  | ||||||
|         Store, |  | ||||||
|         ) |  | ||||||
| @ -1,218 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import re |  | ||||||
| import datetime |  | ||||||
| import logging |  | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue.cache import CacheTx |  | ||||||
| from chainqueue.entry import QueueEntry |  | ||||||
| from chainqueue.error import NotLocalTxError |  | ||||||
| from chainqueue.enum import ( |  | ||||||
|         StatusBits, |  | ||||||
|         all_errors, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
def to_key(t, n, k):
    """Build a queue item key from timestamp t, sequence number n and tx hash k."""
    return f'{t}_{n}_{k}'
| 
 |  | ||||||
| 
 |  | ||||||
def from_key(k):
    """Split a queue item key into its (timestamp, sequence number, tx hash) parts."""
    ts_part, seq_part, tx_hash = k.split('_')
    return float(ts_part), int(seq_part), tx_hash
| 
 |  | ||||||
# all error status bits except network errors, which are not local failures
all_local_errors = all_errors() - StatusBits.NETWORK_ERROR

# matches uppercase state constant names (e.g. QUEUED, IN_NETWORK); the
# leading [^_] excludes dunder and private attributes
re_u = r'^[^_][_A-Z]+$'
class Store:
    """Queue transaction store mediating between a state store, an index
    store, a sequence counter and an optional tx cache.

    The uppercase state constants of the state store (QUEUED, RESERVED, ...)
    and a selection of its methods are mirrored as attributes on the store
    instance itself.
    """

    def __init__(self, chain_spec, state_store, index_store, counter, cache=None, sync=True):
        """
        :param chain_spec: Chain spec of the network the queued txs belong to
        :param state_store: shep state store holding queue item state
        :param index_store: Maps tx hash to queue item key
        :param counter: Monotonic sequence number source (next() method)
        :param cache: Optional tx cache written to on put
        :param sync: When True, sync the state store on construction
        :raises FileNotFoundError: If the initial state store sync fails
        """
        self.chain_spec = chain_spec
        self.cache = cache
        self.state_store = state_store
        self.index_store = index_store
        self.counter = counter
        # mirror every uppercase state constant of the state store on self
        for s in dir(self.state_store):
            if not re.match(re_u, s):
                continue
            v = self.state_store.from_name(s)
            setattr(self, s, v)
        # mirror these state store methods directly on the store instance
        for v in [
                'state',
                'change',
                'set',
                'unset',
                'name',
                'modified',
                'purge',
                ]:
            setattr(self, v, getattr(self.state_store, v))

        if not sync:
            return

        sync_err = None
        try:
            self.state_store.sync()
        except Exception as e:
            sync_err = e

        # raise outside the except block, normalized to FileNotFoundError
        if sync_err is not None:
            raise FileNotFoundError(sync_err)


    def put(self, v, cache_adapter=CacheTx):
        """Add a new signed transaction to the queue.

        :param v: Signed raw transaction content
        :param cache_adapter: CacheTx implementation used to deserialize v
        :returns: (queue item key, tx hash) tuple
        """
        tx = cache_adapter(self.chain_spec)
        tx.deserialize(v)
        k = tx.hash
        n = self.counter.next()
        t = datetime.datetime.now().timestamp()
        s = to_key(t, n, k)
        self.index_store.put(k, s)
        self.state_store.put(s, v)
        if self.cache is not None:
            self.cache.put(self.chain_spec, tx)
        return (s, k,)


    def get(self, k):
        """Retrieve the queue item for tx hash k.

        :param k: Transaction hash
        :raises NotLocalTxError: If the tx is not found in the queue
        :returns: (queue item key, signed raw tx) tuple
        """
        v = None
        s = self.index_store.get(k)
        err = None
        try:
            v = self.state_store.get(s)
        except FileNotFoundError as e:
            err = e
        if v is None:
            raise NotLocalTxError('could not find tx {}: {}'.format(k, err))
        return (s, v,)


    def by_state(self, state=0, not_state=0, include_pending=False, limit=4096, strict=False, threshold=None):
        """List tx hashes of queue items matching the given state filters.

        :param state: Bit field of states to match
        :param not_state: Exclude items having any of these state bits set
        :param include_pending: Also include items in the zero (pending) state
        :param limit: Maximum number of hashes to return (0 means no limit)
        :param strict: Only match items whose state is fully covered by state
        :param threshold: Exclude items modified after this value
        :returns: List of tx hashes
        """
        hashes = []
        i = 0

        refs_state = []
        if state > 0:
            if self.state_store.is_pure(state):
                refs_state = self.state_store.list(state)
            elif strict:
                refs_state = self.state_store.list(state)
            else:
                # composite, non-strict match: union of the items listed
                # under each individual state bit
                for v in self.state_store.elements(state, numeric=True):
                    refs_state += self.state_store.list(v)
                refs_state = list(set(refs_state))
        if include_pending:
            refs_state += self.state_store.list(0)

        refs_state.sort()

        for ref in refs_state:
            v = from_key(ref)
            hsh = v[2]

            item_state = self.state_store.state(ref)

            if strict:
                if item_state & state != item_state:
                    continue

            if item_state & not_state > 0:
                continue

            item_state_str = self.state_store.name(item_state)
            logg.info('state {} {} ({})'.format(ref, item_state_str, item_state))

            if threshold is not None:
                v = self.state_store.modified(ref)
                if v > threshold:
                    continue

            hashes.append(hsh)

            i += 1
            if limit > 0 and i == limit:
                break

        return hashes


    def upcoming(self, limit=4096):
        """List tx hashes queued for sending."""
        return self.by_state(state=self.QUEUED, limit=limit)


    def deferred(self, limit=4096, threshold=None):
        """List tx hashes deferred for a later send retry."""
        return self.by_state(state=self.DEFERRED, limit=limit, threshold=threshold)


    def failed(self, limit=4096):
        """List tx hashes in a local or node error state."""
        r = []
        r += self.by_state(state=self.LOCAL_ERROR, limit=limit)
        r += self.by_state(state=self.NODE_ERROR, limit=limit)
        r.sort()
        if len(r) > limit:
            r = r[:limit]
        return r


    def pending(self, limit=4096):
        """List tx hashes with no state bits set at all."""
        return self.by_state(include_pending=True, limit=limit, strict=True)


    def reserve(self, k):
        """Reserve the queue item k for exclusive processing."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.reserve()


    def enqueue(self, k):
        """Queue item k for (re)sending.

        A previously failed item is retried; a fresh item is marked ready.
        """
        # fix: StateTransitionInvalid was referenced without being imported,
        # raising NameError instead of falling through to readysend; shep is
        # already a project dependency (see state module / requirements)
        from shep.error import StateTransitionInvalid
        entry = QueueEntry(self, k)
        entry.load()
        try:
            entry.retry()
        except StateTransitionInvalid:
            entry.readysend()


    def fail(self, k):
        """Mark the queue item k as failed to send."""
        entry = QueueEntry(self, k)
        entry.load()
        logg.debug('fail {}'.format(k))
        entry.sendfail()


    def final(self, k, block, tx, error=False):
        """Finalize queue item k against the confirmed block and tx."""
        entry = QueueEntry(self, k)
        entry.load()
        if error:
            entry.fail(block, tx)
        else:
            entry.succeed(block, tx)


    def send_start(self, k):
        """Reserve queue item k for sending and return its entry."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.reserve()
        return entry


    def send_end(self, k):
        """Mark queue item k as sent."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.sent()


    def is_reserved(self, k):
        """Return True if queue item k is currently reserved."""
        entry = QueueEntry(self, k)
        entry.load()
        return entry.test(self.RESERVED)


    def sync(self):
        """Synchronize the backing state store."""
        self.state_store.sync()
| @ -1,94 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import logging |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from leveldir.hex import HexDir |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue.error import ( |  | ||||||
|         DuplicateTxError, |  | ||||||
|         NotLocalTxError, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
class IndexStore(HexDir):
    """Tx hash to queue item key index backed by a hex directory store.

    NOTE(review): inherits HexDir but delegates to a separately composed
    HexDir instance; the base class is never initialized -- confirm whether
    the subclassing is still needed.
    """

    def __init__(self, root_path, digest_bytes=32):
        """
        :param root_path: Filesystem root of the index
        :param digest_bytes: Size of the tx hash key in bytes
        """
        # fix: the original computed os.path.join(root_path, 'contents') and
        # discarded the result (dead statement, removed); presumably the
        # index was meant to live in a 'contents' subdirectory -- confirm
        # before changing the actual storage path
        self.store = HexDir(root_path, digest_bytes)


    def __exists(self, k):
        """Return True if an index entry exists for tx hash k."""
        try:
            self.get(k)
        except NotLocalTxError:
            return False
        return True


    def put(self, k, v):
        """Add an index entry for tx hash k.

        :param k: Transaction hash, hex
        :param v: Queue item key to store
        :raises DuplicateTxError: If an entry for k already exists
        """
        kb = bytes.fromhex(k)
        vb = v.encode('utf-8')
        if self.__exists(k):
            raise DuplicateTxError(k)
        self.store.add(kb, vb)


    def get(self, k):
        """Return the index entry for tx hash k.

        :param k: Transaction hash, hex
        :raises NotLocalTxError: If no entry exists for k
        :returns: Queue item key
        """
        fp = self.store.to_filepath(k)
        try:
            f = open(fp, 'rb')
        except FileNotFoundError as e:
            raise NotLocalTxError(e) from e
        # fix: close the handle even if the read raises
        with f:
            v = f.read()
        return v.decode('utf-8')
| 
 |  | ||||||
| 
 |  | ||||||
class CounterStore:
    """Persistent sequence counter stored as 8 big-endian bytes in a file."""

    def __init__(self, root_path):
        """
        :param root_path: Directory holding the .counter file; created if missing
        """
        # idempotent directory creation (replaces a stat/except race)
        os.makedirs(root_path, exist_ok=True)

        fp = os.path.join(root_path, '.counter')
        f = None
        try:
            f = open(fp, 'rb+')
        except FileNotFoundError:
            logg.debug('counter not found, creating new in {}'.format(fp))
            # seed the counter file with a zero value
            f = open(fp, 'wb+')
            f.write(b'\x00' * 8)
            f.close()
            f = open(fp, 'rb+')

        v = f.read(8)
        self.count = int.from_bytes(v, byteorder='big')
        logg.debug('counter starts at {}'.format(self.count))

        # keep the file position at 0 so next() overwrites in place
        f.seek(0)

        self.f = f


    def __del__(self):
        # fix: f may never have been assigned if __init__ failed early
        f = getattr(self, 'f', None)
        if f is not None:
            f.close()


    def next(self):
        """Return the current counter value and persist the incremented value.

        :returns: Counter value before the increment
        """
        c = self.count
        self.count += 1
        v = self.count.to_bytes(8, 'big')
        self.f.write(v)
        self.f.seek(0)
        return c
| @ -1,5 +1,8 @@ | |||||||
| hexathon~=0.1.7 | pysha3==1.0.2 | ||||||
| leveldir~=0.3.0 | hexathon~=0.1.0 | ||||||
| confini~=0.6.1 | leveldir~=0.1.0 | ||||||
| chainlib~=0.4.0 | alembic==1.4.2 | ||||||
| shep~=0.3.0 | SQLAlchemy==1.3.20 | ||||||
|  | confini~=0.5.1 | ||||||
|  | pyxdg~=0.27 | ||||||
|  | chainlib~=0.0.12 | ||||||
|  | |||||||
							
								
								
									
										25
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										25
									
								
								setup.cfg
									
									
									
									
									
								
							| @ -1,40 +1,41 @@ | |||||||
| [metadata] | [metadata] | ||||||
| name = chainqueue | name = chainqueue | ||||||
| version = 0.2.4 | version = 0.0.6a3 | ||||||
| description = Generic blockchain transaction queue control | description = Generic blockchain transaction queue control | ||||||
| author = Louis Holbrook | author = Louis Holbrook | ||||||
| author_email = dev@holbrook.no | author_email = dev@holbrook.no | ||||||
| url = https://git.defalsify.org/chainqueue.git | url = https://gitlab.com/chaintools/chainqueue | ||||||
| keywords = | keywords = | ||||||
| 	dlt | 	cic | ||||||
| 	cryptocurrency | 	cryptocurrency | ||||||
| 	ethereum | 	ethereum | ||||||
|  | 	solidarity | ||||||
|  | 	mutual_credit | ||||||
| classifiers = | classifiers = | ||||||
| 	Programming Language :: Python :: 3 | 	Programming Language :: Python :: 3 | ||||||
| 	Operating System :: OS Independent | 	Operating System :: OS Independent | ||||||
| 	Development Status :: 3 - Alpha | 	Development Status :: 3 - Alpha | ||||||
| 	Environment :: Console | 	Environment :: Console | ||||||
| 	Intended Audience :: Developers | 	Intended Audience :: Developers | ||||||
| 	License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) | 	License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+) | ||||||
| 	Topic :: Internet | 	Topic :: Internet | ||||||
| #	Topic :: Blockchain :: EVM | #	Topic :: Blockchain :: EVM | ||||||
| license = OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+) | license = GPL3 | ||||||
| licence_files = | licence_files = | ||||||
| 	LICENSE | 	LICENSE.txt | ||||||
| 
 | 
 | ||||||
| [options] | [options] | ||||||
| python_requires = >= 3.7 | python_requires = >= 3.6 | ||||||
| include_package_data = True | include_package_data = True | ||||||
| packages =  | packages =  | ||||||
| 	chainqueue | 	chainqueue | ||||||
| 	chainqueue.cache | 	chainqueue.db | ||||||
|  | 	chainqueue.db.models | ||||||
|  | 	chainqueue.sql | ||||||
|  | 	chainqueue.adapters | ||||||
| 	chainqueue.unittest | 	chainqueue.unittest | ||||||
| 	chainqueue.store |  | ||||||
| 	chainqueue.runnable | 	chainqueue.runnable | ||||||
| 	chainqueue.cli |  | ||||||
| 	chainqueue.data |  | ||||||
| 
 | 
 | ||||||
| [options.entry_points] | [options.entry_points] | ||||||
| console_scripts = | console_scripts = | ||||||
| 	chainqueue-list = chainqueue.runnable.list:main | 	chainqueue-list = chainqueue.runnable.list:main | ||||||
| 	chainqueue-state = chainqueue.runnable.state:main |  | ||||||
|  | |||||||
							
								
								
									
										16
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										16
									
								
								setup.py
									
									
									
									
									
								
							| @ -12,11 +12,23 @@ while True: | |||||||
|     requirements.append(l.rstrip()) |     requirements.append(l.rstrip()) | ||||||
| f.close() | f.close() | ||||||
| 
 | 
 | ||||||
| postgres_requirements = ['psycopg2==2.8.6'] + requirements | test_requirements = [] | ||||||
| sqlite_requirements = [] + requirements | f = open('test_requirements.txt', 'r') | ||||||
|  | while True: | ||||||
|  |     l = f.readline() | ||||||
|  |     if l == '': | ||||||
|  |         break | ||||||
|  |     test_requirements.append(l.rstrip()) | ||||||
|  | f.close() | ||||||
| 
 | 
 | ||||||
|  | postgres_requirements = [ | ||||||
|  |     'psycopg2==2.8.6', | ||||||
|  |         ] + requirements | ||||||
|  | sqlite_requirements = [ | ||||||
|  |         ] + requirements | ||||||
| setup( | setup( | ||||||
|         install_requires=requirements, |         install_requires=requirements, | ||||||
|  |         tests_require=test_requirements, | ||||||
|         extras_require={ |         extras_require={ | ||||||
|             'postgres': postgres_requirements, |             'postgres': postgres_requirements, | ||||||
|             'sqlite': sqlite_requirements, |             'sqlite': sqlite_requirements, | ||||||
|  | |||||||
							
								
								
									
										1
									
								
								test_requirements.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								test_requirements.txt
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1 @@ | |||||||
|  | chainlib==0.0.9a10 | ||||||
| @ -1,39 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import tempfile |  | ||||||
| import unittest |  | ||||||
| import shutil |  | ||||||
| import logging |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from shep.store.file import SimpleFileStoreFactory |  | ||||||
| from chainlib.chain import ChainSpec |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue import ( |  | ||||||
|         Store, |  | ||||||
|         Status, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| # test imports |  | ||||||
| from tests.common import ( |  | ||||||
|         MockCounter, |  | ||||||
|         MockContentStore, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| logg = logging.getLogger(__name__) |  | ||||||
| 
 |  | ||||||
class TestShepBase(unittest.TestCase):
    """Fixture providing a file-backed shep state store and a queue store."""

    def setUp(self):
        self.path = tempfile.mkdtemp()
        logg.debug('using path {}'.format(self.path))
        store_factory = SimpleFileStoreFactory(self.path).add
        self.state = Status(store_factory)
        spec = ChainSpec('foo', 'bar', 42, 'baz')
        self.store = Store(spec, self.state, MockContentStore(), MockCounter())


    def tearDown(self):
        shutil.rmtree(self.path)
							
								
								
									
										103
									
								
								tests/common.py
									
									
									
									
									
								
							
							
						
						
									
										103
									
								
								tests/common.py
									
									
									
									
									
								
							| @ -1,103 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import hashlib |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue.cache import ( |  | ||||||
|         Cache, |  | ||||||
|         CacheTokenTx, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
class MockCounter:
    """Counter stub handing out successive integers starting at zero."""

    def __init__(self):
        self.c = 0


    def next(self):
        """Return the current counter value and advance by one."""
        self.c += 1
        return self.c - 1
| 
 |  | ||||||
| 
 |  | ||||||
class MockTokenCache(Cache):
    """Token cache stub that records the last filter it was queried with."""

    def __init__(self):
        self.db = {}
        self.last_filter = None


    def put(self, chain_spec, cache_tx):
        """Index the cache tx under its hash."""
        self.db[cache_tx.hash] = cache_tx


    def get(self, chain_spec, tx_hash):
        """Return the cache tx stored under tx_hash."""
        return self.db[tx_hash]


    def by_nonce(self, cache_filter):
        """Record the filter; the stub produces no result."""
        self.last_filter = cache_filter


    def by_date(self, cache_filter=None):
        """Record the filter; the stub produces no result."""
        self.last_filter = cache_filter


    def count(self, cache_filter):
        """Record the filter; the stub produces no result."""
        self.last_filter = cache_filter
| 
 |  | ||||||
| 
 |  | ||||||
class MockCacheTokenTx(CacheTokenTx):
    """Token tx cache stub deriving deterministic dummy field values."""

    def deserialize(self, signed_tx):
        """Populate the tx fields from digests of the signed tx content."""
        if isinstance(signed_tx, str):
            data = signed_tx.encode('utf-8')
        else:
            data = signed_tx
        z = hashlib.sha1(data + b'\x01').digest()
        nonce = int.from_bytes(z[:4], 'big')
        token_value = int.from_bytes(z[4:8], 'big')
        value = int.from_bytes(z[8:12], 'big')

        # chain three further sha1 digests to fabricate the addresses
        fabricated = []
        for _ in range(3):
            z = hashlib.sha1(z).digest()
            fabricated.append(z.hex())
        (sender, recipient, token) = fabricated

        # the tx hash is a sha256 over the last sha1 digest
        tx_hash = hashlib.sha256(z).digest().hex()

        self.init(tx_hash, nonce, sender, recipient, value)
        self.set('src_token', token)
        self.set('dst_token', token)
        self.set('src_value', token_value)
        self.set('dst_value', token_value)
        self.confirm(42, 13, 1024000)

        return self
| 
 |  | ||||||
| 
 |  | ||||||
| class MockContentStore: |  | ||||||
| 
 |  | ||||||
|     def __init__(self): |  | ||||||
|         self.store = {} |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def put(self, k, v): |  | ||||||
|         self.store[k] = v |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def get(self, k): |  | ||||||
|         return self.store.get(k) |  | ||||||
| @ -1,95 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import logging |  | ||||||
| import unittest |  | ||||||
| import math |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from hexathon import add_0x |  | ||||||
| from chainlib.chain import ChainSpec |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue import QueueEntry |  | ||||||
| from chainqueue.cache import ( |  | ||||||
|         CacheTokenTx, |  | ||||||
|         CacheFilter, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| # test imports |  | ||||||
| from tests.base_shep import TestShepBase |  | ||||||
| from tests.common import ( |  | ||||||
|         MockTokenCache, |  | ||||||
|         MockCacheTokenTx, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| logging.basicConfig(level=logging.DEBUG) |  | ||||||
| logg = logging.getLogger() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class MockNormalizer: |  | ||||||
| 
 |  | ||||||
|     def address(self, v): |  | ||||||
|         return 'address' + v |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def value(self, v): |  | ||||||
|         dv = int(math.log10(v) + 1) |  | ||||||
|         return float(v / (10 ** dv)) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def hash(self, v): |  | ||||||
|         return 'ashbashhash' + v |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class TestCache(TestShepBase): |  | ||||||
| 
 |  | ||||||
|     def setUp(self): |  | ||||||
|         super(TestCache, self).setUp() |  | ||||||
|         self.chain_spec = ChainSpec('foo', 'bar', 42, 'baz') |  | ||||||
|         self.cache = MockTokenCache() |  | ||||||
| 
 |  | ||||||
|      |  | ||||||
|     def test_cache_instance(self): |  | ||||||
|         normalizer = MockNormalizer() |  | ||||||
|         a = b'foo' |  | ||||||
|         tx = MockCacheTokenTx(self.chain_spec, normalizer=normalizer) |  | ||||||
|         tx.deserialize(a) |  | ||||||
|         self.assertTrue(isinstance(tx.value, float)) |  | ||||||
|         self.assertEqual(tx.sender[:4], 'addr') |  | ||||||
|         self.assertEqual(tx.recipient[:4], 'addr') |  | ||||||
|         self.assertEqual(tx.hash[:11], 'ashbashhash') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_cache_putget(self): |  | ||||||
|         a = b'foo' |  | ||||||
|         tx = MockCacheTokenTx(self.chain_spec) |  | ||||||
|         tx.deserialize(a) |  | ||||||
|         self.cache.put(self.chain_spec, tx) |  | ||||||
|         tx_retrieved = self.cache.get(self.chain_spec, tx.hash) |  | ||||||
|         self.assertEqual(tx, tx_retrieved) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_cache_filter(self): |  | ||||||
|         normalizer = MockNormalizer() |  | ||||||
|         fltr = CacheFilter(normalizer=normalizer) |  | ||||||
| 
 |  | ||||||
|         sender = os.urandom(20).hex() |  | ||||||
|         fltr.add_senders(sender) |  | ||||||
| 
 |  | ||||||
|         recipient_one = os.urandom(20).hex() |  | ||||||
|         recipient_two = os.urandom(20).hex() |  | ||||||
|         fltr.add_recipients([recipient_one, recipient_two]) |  | ||||||
| 
 |  | ||||||
|         self.assertEqual(fltr.senders[0][:4], 'addr') |  | ||||||
|         self.assertEqual(fltr.recipients[1][:4], 'addr') |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_cache_query(self): |  | ||||||
|         a = os.urandom(20).hex() |  | ||||||
|         fltr = CacheFilter(nonce=42) |  | ||||||
|         self.cache.count(fltr) |  | ||||||
|         self.assertEqual(self.cache.last_filter, fltr) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
| @ -1,85 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import logging |  | ||||||
| import unittest |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from hexathon import add_0x |  | ||||||
| from chainlib.tx import Tx |  | ||||||
| from chainlib.block import Block |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue import QueueEntry |  | ||||||
| 
 |  | ||||||
| # test imports |  | ||||||
| from tests.base_shep import TestShepBase |  | ||||||
| from tests.common import MockCacheTokenTx |  | ||||||
| 
 |  | ||||||
| logging.basicConfig(level=logging.DEBUG) |  | ||||||
| logg = logging.getLogger() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class TestEntry(TestShepBase): |  | ||||||
|      |  | ||||||
|     def test_entry_get(self): |  | ||||||
|         signed_tx = add_0x(os.urandom(128).hex()) |  | ||||||
|         nonce = 42 |  | ||||||
|         entry = QueueEntry(self.store, cache_adapter=MockCacheTokenTx) |  | ||||||
|         tx_hash_one = entry.create(signed_tx) |  | ||||||
| 
 |  | ||||||
|         signed_tx = add_0x(os.urandom(128).hex()) |  | ||||||
|         nonce = 42 |  | ||||||
|         entry = QueueEntry(self.store, cache_adapter=MockCacheTokenTx) |  | ||||||
|         tx_hash_two = entry.create(signed_tx) |  | ||||||
| 
 |  | ||||||
|         txs = self.store.by_state(include_pending=True) |  | ||||||
|         self.assertEqual(len(txs), 2) |  | ||||||
|       |  | ||||||
|         logg.debug('tx hash one {}'.format(tx_hash_one)) |  | ||||||
|         entry = QueueEntry(self.store, tx_hash=tx_hash_one, cache_adapter=MockCacheTokenTx) |  | ||||||
|         entry.load() |  | ||||||
|         entry.sent() |  | ||||||
|          |  | ||||||
|         txs = self.store.by_state(include_pending=True) |  | ||||||
|         self.assertEqual(len(txs), 1) |  | ||||||
| 
 |  | ||||||
|         txs = self.store.by_state(state=self.store.IN_NETWORK) |  | ||||||
|         self.assertEqual(len(txs), 1) |  | ||||||
| 
 |  | ||||||
|         entry.succeed(None, None) |  | ||||||
|         txs = self.store.by_state(include_pending=True) |  | ||||||
|         self.assertEqual(len(txs), 1) |  | ||||||
|        |  | ||||||
|         entry = QueueEntry(self.store, tx_hash_two) |  | ||||||
|         entry.load() |  | ||||||
|         entry.sent() |  | ||||||
|          |  | ||||||
|         txs = self.store.by_state(state=self.store.IN_NETWORK) |  | ||||||
|         self.assertEqual(len(txs), 2) |  | ||||||
| 
 |  | ||||||
|         txs = self.store.by_state(state=self.store.IN_NETWORK, strict=True) |  | ||||||
|         self.assertEqual(len(txs), 1) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_entry_change(self): |  | ||||||
|         signed_tx = add_0x(os.urandom(128).hex()) |  | ||||||
|         nonce = 42 |  | ||||||
|         entry = QueueEntry(self.store, cache_adapter=MockCacheTokenTx) |  | ||||||
|         tx_hash = entry.create(signed_tx) |  | ||||||
| 
 |  | ||||||
|         block = Block() |  | ||||||
|         block.number = 13 |  | ||||||
|         tx = Tx(None) |  | ||||||
|         tx.index = 666 |  | ||||||
| 
 |  | ||||||
|         entry.readysend() |  | ||||||
|         entry.reserve() |  | ||||||
|         entry.sendfail() |  | ||||||
| 
 |  | ||||||
|         entry = QueueEntry(self.store, tx_hash, cache_adapter=MockCacheTokenTx) |  | ||||||
|         entry.load() |  | ||||||
|         self.assertEqual(str(entry.tx_hash), tx_hash) |  | ||||||
|         |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
| @ -1,119 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import tempfile |  | ||||||
| import unittest |  | ||||||
| import logging |  | ||||||
| import time |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from shep.store.file import SimpleFileStoreFactory |  | ||||||
| from chainlib.chain import ChainSpec |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue import ( |  | ||||||
|         Store, |  | ||||||
|         Status, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| # test imports |  | ||||||
| from tests.common import ( |  | ||||||
|         MockCounter, |  | ||||||
|         MockTokenCache, |  | ||||||
|         MockCacheTokenTx, |  | ||||||
|         MockContentStore, |  | ||||||
|         ) |  | ||||||
| from tests.base_shep import TestShepBase |  | ||||||
| 
 |  | ||||||
| logging.basicConfig(level=logging.DEBUG) |  | ||||||
| logg = logging.getLogger() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class TestIntegrateBase(TestShepBase): |  | ||||||
| 
 |  | ||||||
|     def setUp(self): |  | ||||||
|         self.path = tempfile.mkdtemp() |  | ||||||
|         factory = SimpleFileStoreFactory(self.path).add |  | ||||||
|         self.state = Status(factory) |  | ||||||
|         content_store = MockContentStore() |  | ||||||
|         counter = MockCounter() |  | ||||||
|         chain_spec = ChainSpec('foo', 'bar', 42, 'baz') |  | ||||||
|         self.cache = MockTokenCache() |  | ||||||
|         self.store = Store(chain_spec, self.state, content_store, counter, cache=self.cache) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_integration_valid(self): |  | ||||||
|         self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
| 
 |  | ||||||
|      |  | ||||||
|     def test_state_default(self): |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         v = self.store.pending() |  | ||||||
|         self.assertEqual(len(v), 1) |  | ||||||
|         self.assertEqual(v[0], hx) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_state_enqueue(self): |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.get(hx) |  | ||||||
|         self.store.enqueue(hx) |  | ||||||
|         v = self.store.upcoming() |  | ||||||
|         self.assertEqual(len(v), 1) |  | ||||||
|         v = self.store.pending() |  | ||||||
|         self.assertEqual(len(v), 0) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_state_defer(self): |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
|         v = self.store.deferred() |  | ||||||
|         self.assertEqual(len(v), 1) |  | ||||||
|         self.assertEqual(v[0], hx) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_state_multiple(self): |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
|        |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
|         v = self.store.deferred() |  | ||||||
|         self.assertEqual(len(v), 2) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_state_multiple_sort(self): |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
|         |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.enqueue(hx) |  | ||||||
|          |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
| 
 |  | ||||||
|         self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         v = self.store.deferred() |  | ||||||
|         self.assertEqual(len(v), 2) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_state_date_threshold(self): |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
|         then = self.store.modified(s) |  | ||||||
|         time.sleep(0.1) |  | ||||||
| 
 |  | ||||||
|         (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx) |  | ||||||
|         self.store.reserve(hx) |  | ||||||
|         self.store.fail(hx) |  | ||||||
| 
 |  | ||||||
|         v = self.store.deferred(threshold=then) |  | ||||||
|         self.assertEqual(len(v), 1) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
| @ -345,64 +345,6 @@ class TestTxQuery(TestTxBase): | |||||||
|         txs = get_account_tx(self.chain_spec, self.alice, as_sender=True, as_recipient=False, not_status=StatusBits.QUEUED, status=StatusBits.IN_NETWORK, session=self.session) |         txs = get_account_tx(self.chain_spec, self.alice, as_sender=True, as_recipient=False, not_status=StatusBits.QUEUED, status=StatusBits.IN_NETWORK, session=self.session) | ||||||
|         self.assertEqual(len(txs.keys()), 1) |         self.assertEqual(len(txs.keys()), 1) | ||||||
| 
 | 
 | ||||||
|     def test_latest_txs(self): |  | ||||||
| 
 |  | ||||||
|         nonce_hashes = [self.tx_hash] |  | ||||||
|         tx_hash = add_0x(os.urandom(32).hex()) |  | ||||||
|         signed_tx = add_0x(os.urandom(128).hex()) |  | ||||||
|         create( |  | ||||||
|                 self.chain_spec, |  | ||||||
|                 42, |  | ||||||
|                 self.alice, |  | ||||||
|                 tx_hash, |  | ||||||
|                 signed_tx, |  | ||||||
|                 session=self.session, |  | ||||||
|                 ) |  | ||||||
|         txc = TxCache( |  | ||||||
|                 tx_hash, |  | ||||||
|                 self.alice, |  | ||||||
|                 self.bob, |  | ||||||
|                 self.foo_token, |  | ||||||
|                 self.bar_token, |  | ||||||
|                 self.from_value, |  | ||||||
|                 self.to_value, |  | ||||||
|                 session=self.session, |  | ||||||
|                 ) |  | ||||||
|         self.session.add(txc) |  | ||||||
|         self.session.commit() |  | ||||||
| 
 |  | ||||||
|         nonce_hashes.append(tx_hash) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|         tx_hash = add_0x(os.urandom(32).hex()) |  | ||||||
|         signed_tx = add_0x(os.urandom(128).hex()) |  | ||||||
|         create( |  | ||||||
|                 self.chain_spec, |  | ||||||
|                 41, |  | ||||||
|                 self.alice, |  | ||||||
|                 tx_hash, |  | ||||||
|                 signed_tx, |  | ||||||
|                 session=self.session, |  | ||||||
|                 ) |  | ||||||
|         txc = TxCache( |  | ||||||
|                 tx_hash, |  | ||||||
|                 self.alice, |  | ||||||
|                 self.bob, |  | ||||||
|                 self.foo_token, |  | ||||||
|                 self.bar_token, |  | ||||||
|                 self.from_value, |  | ||||||
|                 self.to_value, |  | ||||||
|                 session=self.session, |  | ||||||
|                 ) |  | ||||||
|         self.session.add(txc) |  | ||||||
| 
 |  | ||||||
|         nonce_hashes.append(tx_hash) |  | ||||||
| 
 |  | ||||||
|         txs = get_latest_txs(self.chain_spec, session=self.session) |  | ||||||
|         self.assertEqual(len(txs.keys()), 3) |  | ||||||
| 
 |  | ||||||
|         txs = get_latest_txs(self.chain_spec, count=1, session=self.session) |  | ||||||
|         self.assertEqual(len(txs.keys()), 1) |  | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
| @ -1,60 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import logging |  | ||||||
| import unittest |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from hexathon import ( |  | ||||||
|         add_0x, |  | ||||||
|         strip_0x, |  | ||||||
|         ) |  | ||||||
| from shep.error import StateTransitionInvalid |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue import QueueEntry |  | ||||||
| 
 |  | ||||||
| # test imports |  | ||||||
| from tests.base_shep import TestShepBase |  | ||||||
| from tests.common import MockCacheTokenTx |  | ||||||
| 
 |  | ||||||
| logging.basicConfig(level=logging.DEBUG) |  | ||||||
| logg = logging.getLogger() |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| class TestShep(TestShepBase): |  | ||||||
|     |  | ||||||
|     def test_shep_setup(self): |  | ||||||
|         pass |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_shep_tx(self): |  | ||||||
|         signed_tx = add_0x(os.urandom(128).hex()) |  | ||||||
|         nonce = 42 |  | ||||||
|         tx = QueueEntry(self.store, cache_adapter=MockCacheTokenTx) |  | ||||||
|         tx_hash = tx.create(signed_tx) |  | ||||||
| 
 |  | ||||||
|         tx_retrieved = QueueEntry(self.store, tx_hash=tx_hash) |  | ||||||
|         tx_retrieved.load() |  | ||||||
|         self.assertEqual(tx_retrieved.signed_tx, strip_0x(signed_tx)) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_shep_valid(self): |  | ||||||
|         self.state.put('foo', 'bar') |  | ||||||
|         self.state.set('foo', self.state.IN_NETWORK) |  | ||||||
|         self.state.set('foo', self.state.FINAL) |  | ||||||
|      |  | ||||||
| 
 |  | ||||||
|     def test_shep_invalid(self): |  | ||||||
|         self.state.put('foo', 'bar') |  | ||||||
|         self.state.set('foo', self.state.FINAL) |  | ||||||
|         with self.assertRaises(StateTransitionInvalid): |  | ||||||
|             self.state.move('foo', self.state.INSUFFICIENT_FUNDS) |  | ||||||
|     |  | ||||||
| 
 |  | ||||||
|     def test_shep_cache(self): |  | ||||||
|         self.store.put('bar', cache_adapter=MockCacheTokenTx) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
| @ -1,104 +0,0 @@ | |||||||
| # standard imports |  | ||||||
| import os |  | ||||||
| import tempfile |  | ||||||
| import unittest |  | ||||||
| import logging |  | ||||||
| import shutil |  | ||||||
| 
 |  | ||||||
| # external imports |  | ||||||
| from chainlib.chain import ChainSpec |  | ||||||
| from shep.store.noop import NoopStoreFactory |  | ||||||
| 
 |  | ||||||
| # local imports |  | ||||||
| from chainqueue.store.fs import ( |  | ||||||
|         IndexStore, |  | ||||||
|         CounterStore, |  | ||||||
|         ) |  | ||||||
| from chainqueue.store.base import Store |  | ||||||
| from chainqueue.error import DuplicateTxError |  | ||||||
| from chainqueue.state import Status |  | ||||||
| 
 |  | ||||||
| # tests imports |  | ||||||
| from tests.common import ( |  | ||||||
|         MockTokenCache, |  | ||||||
|         MockCacheTokenTx, |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
| logging.basicConfig(level=logging.DEBUG) |  | ||||||
| logg = logging.getLogger() |  | ||||||
| 
 |  | ||||||
| class TestStoreImplementations(unittest.TestCase): |  | ||||||
| 
 |  | ||||||
|     def setUp(self): |  | ||||||
|         self.path = tempfile.mkdtemp() |  | ||||||
|        |  | ||||||
| 
 |  | ||||||
|     def tearDown(self): |  | ||||||
|         shutil.rmtree(self.path) |  | ||||||
| 
 |  | ||||||
|      |  | ||||||
|     def test_basic_index(self): |  | ||||||
|         store = IndexStore(self.path) |  | ||||||
|         hx = os.urandom(32).hex() |  | ||||||
|         data = 'foo_bar_baz' |  | ||||||
|         store.put(hx, data) |  | ||||||
|         r = store.get(hx) |  | ||||||
|         self.assertEqual(data, r) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_basic_counter(self): |  | ||||||
|         store = CounterStore(self.path) |  | ||||||
|         v = store.next() |  | ||||||
|         self.assertEqual(v, 0) |  | ||||||
|         v = store.next() |  | ||||||
|         self.assertEqual(v, 1) |  | ||||||
| 
 |  | ||||||
|         store = CounterStore(self.path) |  | ||||||
|         v = store.next() |  | ||||||
|         self.assertEqual(v, 2) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_duplicate(self): |  | ||||||
|         store = IndexStore(self.path) |  | ||||||
|         hx = os.urandom(32).hex() |  | ||||||
|         data = 'foo_bar_baz' |  | ||||||
|         store.put(hx, data) |  | ||||||
|         with self.assertRaises(DuplicateTxError): |  | ||||||
|             store.put(hx, data) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     def test_upcoming_limit(self): |  | ||||||
|         index_store = IndexStore(self.path) |  | ||||||
|         counter_store = CounterStore(self.path) |  | ||||||
|         chain_spec = ChainSpec('foo', 'bar', 42, 'baz')  |  | ||||||
|         factory = NoopStoreFactory().add |  | ||||||
|         state_store = Status(factory) |  | ||||||
|         cache_store = MockTokenCache() |  | ||||||
|         queue_store = Store(chain_spec, state_store, index_store, counter_store, cache=cache_store) |  | ||||||
| 
 |  | ||||||
|         txs = [] |  | ||||||
|         for i in range(3): |  | ||||||
|             tx_src = os.urandom(128).hex() |  | ||||||
|             tx = queue_store.put(tx_src, cache_adapter=MockCacheTokenTx) |  | ||||||
|             txs.append(tx) |  | ||||||
| 
 |  | ||||||
|         r = queue_store.upcoming(limit=3) |  | ||||||
|         self.assertEqual(len(r), 0) |  | ||||||
| 
 |  | ||||||
|         for tx in txs: |  | ||||||
|             queue_store.enqueue(tx[1]) |  | ||||||
| 
 |  | ||||||
|         r = queue_store.upcoming(limit=3) |  | ||||||
|         self.assertEqual(len(r), 3) |  | ||||||
| 
 |  | ||||||
|         queue_store.send_start(txs[0][1]) |  | ||||||
|         r = queue_store.upcoming(limit=3) |  | ||||||
|         self.assertEqual(len(r), 2) |  | ||||||
| 
 |  | ||||||
|         queue_store.send_end(txs[0][1]) |  | ||||||
|         r = queue_store.upcoming(limit=3) |  | ||||||
|         self.assertEqual(len(r), 2) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
		Loading…
	
		Reference in New Issue
	
	Block a user