mirror of git://holbrook.no/eth-monitor.git
synced 2025-10-31 10:04:13 +01:00
Add tx-by-address lister CLI
This commit is contained in:
	parent c760c3800c
	commit 5127561b1f
				| @ -1,3 +1,7 @@ | ||||
| - 0.1.0 | ||||
| 	* Read blocks, tx, rcpt from cache | ||||
| 	* Read include and exclude address lists from cli options | ||||
| 	* Cumulative output rendering | ||||
| - 0.0.7 | ||||
| 	* Remove forced execution with renderers | ||||
| - 0.0.6 | ||||
|  | ||||
58  eth_monitor/index.py  Normal file
							| @ -0,0 +1,58 @@ | ||||
| # standard imports | ||||
| import logging | ||||
| import json | ||||
| 
 | ||||
| # external imports | ||||
| from hexathon import ( | ||||
|         uniform as hex_uniform, | ||||
|         strip_0x, | ||||
|         ) | ||||
| 
 | ||||
| logg = logging.getLogger(__name__) | ||||
| 
 | ||||
| 
 | ||||
| class AddressIndex: | ||||
| 
 | ||||
|     def __init__(self, rpc, store): | ||||
|         self.rpc = rpc | ||||
|         self.store = store | ||||
|         self.addresses = {} | ||||
| 
 | ||||
| 
 | ||||
|     def load_address_tx(self, address): | ||||
|         address = hex_uniform(strip_0x(address)) | ||||
|         if self.addresses.get(address) is None: | ||||
|             self.addresses[address] = [] | ||||
|         txs = {} | ||||
|         for tx_hash in self.store.get_address_tx(address): | ||||
|             j = self.store.get_tx(tx_hash) | ||||
|             tx = json.loads(j) | ||||
|             logg.debug('tx {}'.format(tx)) | ||||
| 
 | ||||
|             # block number and tx index may be hex strings or ints depending on the source | ||||
|             try: | ||||
|                 block_number = int(tx['block_number'], 16) | ||||
|             except (TypeError, ValueError): | ||||
|                 block_number = int(tx['block_number']) | ||||
|  | ||||
|             try: | ||||
|                 tx_index = int(tx['transaction_index'], 16) | ||||
|             except (TypeError, ValueError): | ||||
|                 tx_index = int(tx['transaction_index']) | ||||
| 
 | ||||
|             # numeric tuple key so transactions sort by block number, then index | ||||
|             k = (block_number, tx_index) | ||||
| 
 | ||||
|             txs[k] = tx | ||||
| 
 | ||||
|         ks = list(txs.keys()) | ||||
|         ks.sort() | ||||
|         for k in ks: | ||||
|             self.addresses[address].append(txs[k]) | ||||
|          | ||||
|         return len(ks) | ||||
| 
 | ||||
| 
 | ||||
|     def get_address(self, address): | ||||
|         address = hex_uniform(strip_0x(address)) | ||||
|         return self.addresses[address] | ||||
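For orientation, a minimal sketch of how the new AddressIndex appears intended to be used (mirroring what runnable/list.py below does). The chain spec string, cache directory and address are hypothetical placeholders, and the rpc argument is passed as None only because load_address_tx reads solely from the store:

    # sketch only: names other than AddressIndex/FileStore are taken from elsewhere in this commit
    from chainlib.chain import ChainSpec
    from eth_monitor.store.file import FileStore
    from eth_monitor.index import AddressIndex

    chain_spec = ChainSpec.from_chain_str('evm:ethereum:1')   # placeholder chain spec
    store = FileStore(chain_spec, '/var/cache/eth-monitor')   # placeholder cache root
    idx = AddressIndex(None, store)                           # rpc unused by load_address_tx

    address = '0x' + 'ee' * 20                                # placeholder address
    count = idx.load_address_tx(address)
    print('indexed {} txs for {}'.format(count, address))
    for tx in idx.get_address(address):
        print(tx['hash'])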
81  eth_monitor/rpc.py  Normal file
							| @ -0,0 +1,81 @@ | ||||
| # standard imports | ||||
| import json | ||||
| import logging | ||||
|  | ||||
| # external imports | ||||
| from jsonrpc_std.parse import jsonrpc_from_dict | ||||
| from hexathon import strip_0x | ||||
| 
 | ||||
| logg = logging.getLogger(__name__) | ||||
| 
 | ||||
| class CacheRPC: | ||||
| 
 | ||||
|     def __init__(self, rpc, store): | ||||
|         self.rpc = rpc | ||||
|         self.store = store | ||||
| 
 | ||||
| 
 | ||||
|     def do(self, o): | ||||
|         req = jsonrpc_from_dict(o) | ||||
|         r = None | ||||
|         if req['method'] == 'eth_getBlockByNumber': | ||||
|             block_number = req['params'][0] | ||||
|             v = int(strip_0x(block_number), 16) | ||||
|             try: | ||||
|                 j = self.store.get_block_number(v) | ||||
|                 r = json.loads(j) | ||||
|                 logg.debug('using cached block {} -> {}'.format(v, r['hash'])) | ||||
|             except FileNotFoundError: | ||||
|                 pass | ||||
|         elif req['method'] == 'eth_getBlockByHash': | ||||
|             block_hash = req['params'][0] | ||||
|             v = strip_0x(block_hash) | ||||
|             try: | ||||
|                 j = self.store.get_block(v) | ||||
|                 r = json.loads(j) | ||||
|                 logg.debug('using cached block {}'.format(r['hash'])) | ||||
|             except FileNotFoundError as e: | ||||
|                 logg.debug('not found {}'.format(e)) | ||||
|                 pass | ||||
|         elif req['method'] == 'eth_getTransactionReceipt': | ||||
|             tx_hash = req['params'][0] | ||||
|             j = None | ||||
|             try: | ||||
|                 tx_hash = strip_0x(tx_hash) | ||||
|                 j = self.store.get_rcpt(tx_hash) | ||||
|                 r = json.loads(j) | ||||
|                 logg.debug('using cached rcpt {}'.format(tx_hash)) | ||||
|             except FileNotFoundError as e: | ||||
|                 logg.debug('no file {}'.format(e)) | ||||
|                 pass | ||||
|                  | ||||
| #        elif req['method'] == 'eth_getTransactionByHash': | ||||
| #            raise ValueError(o) | ||||
| #        elif req['method'] == 'eth_getTransactionByBlockHashAndIndex': | ||||
| #            logg.debug('trying tx index {}'.format(o)) | ||||
| #            v = req['params'][0] | ||||
| #            j = None | ||||
| #            try: | ||||
| #                j = self.store.get_block(v) | ||||
| #            except FileNotFoundError: | ||||
| #                pass | ||||
| #                | ||||
| #            if j != None: | ||||
| #                o = json.loads(j) | ||||
| #                idx = int(req['params'][1], 16) | ||||
| #                v = r['transactions'][idx]  | ||||
| #                j = None | ||||
| #                try: | ||||
| #                    j = self.store.get_tx(v) | ||||
| #                except FileNotFoundError: | ||||
| #                    pass | ||||
| # | ||||
| #                if j != None: | ||||
| #                    r = json.loads(j) | ||||
| #                    logg.debug('using cached tx {} -> {}'.format(req['params'], r['hash'])) | ||||
| 
 | ||||
|         if r is None: | ||||
|             logg.debug('passthru {}'.format(o)) | ||||
|             r = self.rpc.do(o) | ||||
| 
 | ||||
|         return r | ||||
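A rough usage sketch for CacheRPC: wrap an ordinary chainlib connection so that block and receipt requests are answered from the file cache when available, and everything else falls through to the node. The endpoint URL, chain spec and cache directory are placeholders:

    from chainlib.chain import ChainSpec
    from chainlib.eth.connection import EthHTTPConnection
    from chainlib.eth.block import block_by_number
    from eth_monitor.store.file import FileStore
    from eth_monitor.rpc import CacheRPC

    chain_spec = ChainSpec.from_chain_str('evm:ethereum:1')   # placeholder chain spec
    store = FileStore(chain_spec, '/var/cache/eth-monitor')   # placeholder cache root
    rpc = EthHTTPConnection('http://localhost:8545')          # placeholder endpoint

    cache_rpc = CacheRPC(rpc, store)
    o = block_by_number(1000000)   # eth_getBlockByNumber; served from cache if stored
    block_src = cache_rpc.do(o)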
| @ -14,7 +14,10 @@ from chainlib.chain import ChainSpec | ||||
| from eth_monitor.filters.cache import Filter as CacheFilter | ||||
| from eth_monitor.filters import RuledFilter | ||||
| from eth_monitor.store.file import FileStore | ||||
| from eth_monitor.rules import AddressRules | ||||
| from eth_monitor.rules import ( | ||||
|         AddressRules, | ||||
|         RuleSimple, | ||||
|         ) | ||||
| 
 | ||||
| logging.basicConfig(level=logging.WARNING) | ||||
| logg = logging.getLogger() | ||||
| @ -88,8 +91,15 @@ def collect_addresses(addresses=[], address_files=[]): | ||||
| 
 | ||||
| def setup_address_rules(addresses): | ||||
|     rules = AddressRules() | ||||
|     outputs = [] | ||||
|     inputs = [] | ||||
|     execs = [] | ||||
|     for address in addresses: | ||||
|         rules.include(sender=address, recipient=address) | ||||
|         outputs.append(address) | ||||
|         inputs.append(address) | ||||
|         execs.append(address) | ||||
|     rule = RuleSimple(outputs, inputs, execs, description='etherscan import') | ||||
|     rules.include(rule) | ||||
|     return rules | ||||
| 
 | ||||
| 
 | ||||
|  | ||||
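The rule setup above now collects all addresses into a single RuleSimple instead of issuing one include() call per address. As a hedged reading of the parameter names (not spelled out in this diff), the first list is presumably matched against transaction senders, the second against recipients, and the third against executable (contract) addresses; a minimal sketch with placeholder addresses:

    from eth_monitor.rules import AddressRules, RuleSimple

    senders = ['0x' + 'aa' * 20]      # placeholder addresses
    recipients = ['0x' + 'bb' * 20]
    execs = []

    rules = AddressRules()
    rules.include(RuleSimple(senders, recipients, execs, description='example'))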
118  eth_monitor/runnable/list.py  Normal file
							| @ -0,0 +1,118 @@ | ||||
| # standard imports | ||||
| import sys | ||||
| import argparse | ||||
| import confini | ||||
| import logging | ||||
| import os | ||||
| import importlib | ||||
| 
 | ||||
| # external imports | ||||
| from chainlib.chain import ChainSpec | ||||
| from chainlib.eth.connection import EthHTTPConnection | ||||
| from chainlib.eth.block import ( | ||||
|         block_by_hash, | ||||
|         Block, | ||||
|         ) | ||||
| from chainlib.eth.tx import ( | ||||
|         receipt, | ||||
|         Tx, | ||||
|         ) | ||||
| from chainlib.jsonrpc import IntSequenceGenerator | ||||
| 
 | ||||
| # local imports | ||||
| from eth_monitor.store.file import FileStore | ||||
| from eth_monitor.index import AddressIndex | ||||
| from eth_monitor.rpc import CacheRPC | ||||
| from eth_monitor.filters.out import OutFilter | ||||
| from eth_monitor.rules import AddressRules | ||||
|          | ||||
| 
 | ||||
| logging.basicConfig(level=logging.WARNING) | ||||
| logg = logging.getLogger() | ||||
| 
 | ||||
| default_eth_provider = os.environ.get('RPC_PROVIDER') | ||||
| if default_eth_provider is None: | ||||
|     default_eth_provider = os.environ.get('ETH_PROVIDER', 'http://localhost:8545') | ||||
| 
 | ||||
| script_dir = os.path.realpath(os.path.dirname(__file__)) | ||||
| exec_dir = os.path.realpath(os.getcwd()) | ||||
| #default_config_dir = os.environ.get('CONFINI_DIR', os.path.join(exec_dir, 'config')) | ||||
| base_config_dir = os.path.join(script_dir, '..', 'data', 'config') | ||||
| 
 | ||||
| 
 | ||||
| argparser = argparse.ArgumentParser('list transactions') | ||||
| argparser.add_argument('-p', '--provider', dest='p', default=default_eth_provider, type=str, help='Web3 provider url (http only)') | ||||
| argparser.add_argument('-c', type=str, help='config file') | ||||
| argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string') | ||||
| argparser.add_argument('--seq', action='store_true', help='Use sequential rpc ids') | ||||
| argparser.add_argument('--output', default=[], action='append', type=str, help='Add output (sender) addresses to includes list') | ||||
| argparser.add_argument('--filter', type=str, action='append', help='Add python module filter path') | ||||
| argparser.add_argument('-v', action='store_true', help='Be verbose') | ||||
| argparser.add_argument('-vv', action='store_true', help='Be more verbose') | ||||
| argparser.add_argument('--fresh', action='store_true', help='Do not read block and tx data from cache, even if available') | ||||
| argparser.add_argument('--renderer', type=str, action='append', default=[], help='Python modules to dynamically load for rendering of transaction output') | ||||
| argparser.add_argument('cache_dir', type=str, help='Directory to read cache data from') | ||||
| args = argparser.parse_args(sys.argv[1:]) | ||||
| 
 | ||||
| 
 | ||||
| if args.vv: | ||||
|     logg.setLevel(logging.DEBUG) | ||||
| elif args.v: | ||||
|     logg.setLevel(logging.INFO) | ||||
| 
 | ||||
| config_dir = args.c | ||||
| config = confini.Config(base_config_dir, os.environ.get('CONFINI_ENV_PREFIX'), override_dirs=args.c) | ||||
| config.process() | ||||
| args_override = { | ||||
|         'CHAIN_SPEC': getattr(args, 'i'), | ||||
|         } | ||||
| config.dict_override(args_override, 'cli') | ||||
| config.add(getattr(args, 'cache_dir'), '_CACHE_DIR') | ||||
| logg.debug('loaded config:\n{}'.format(config)) | ||||
| 
 | ||||
| chain_spec = ChainSpec.from_chain_str(args.i) | ||||
| 
 | ||||
| rpc_id_generator = None | ||||
| if args.seq: | ||||
|     rpc_id_generator = IntSequenceGenerator() | ||||
| 
 | ||||
| auth = None | ||||
| if os.environ.get('RPC_AUTHENTICATION') == 'basic': | ||||
|     from chainlib.auth import BasicAuth | ||||
|     auth = BasicAuth(os.environ['RPC_USERNAME'], os.environ['RPC_PASSWORD']) | ||||
| rpc = EthHTTPConnection(args.p) | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| def main(): | ||||
|     store = FileStore(chain_spec, config.get('_CACHE_DIR')) | ||||
|     use_rpc = rpc | ||||
|     if not args.fresh: | ||||
|         use_rpc = CacheRPC(rpc, store) | ||||
| 
 | ||||
|     renderers_mods = [] | ||||
|     for renderer in args.renderer: | ||||
|         m = importlib.import_module(renderer) | ||||
|         renderers_mods.append(m) | ||||
| 
 | ||||
|     idx = AddressIndex(rpc, store) | ||||
| 
 | ||||
|     for address in args.output: | ||||
|         idx.load_address_tx(address) | ||||
| 
 | ||||
|     OutFilter.init(store) | ||||
|     out_filter = OutFilter(chain_spec, renderers=renderers_mods) | ||||
| 
 | ||||
|     # iterate explicitly over each requested address rather than relying on the loop variable above | ||||
|     for address in args.output: | ||||
|         for tx_src in idx.get_address(address): | ||||
|             o = block_by_hash(tx_src['block_hash']) | ||||
|             block_src = use_rpc.do(o) | ||||
|  | ||||
|             o = receipt(tx_src['hash']) | ||||
|             rcpt = use_rpc.do(o) | ||||
|  | ||||
|             block = Block(block_src) | ||||
|             tx = Tx(tx_src, block=block, rcpt=rcpt) | ||||
|             out_filter.filter(use_rpc, block, tx, db_session=None) | ||||
| 
 | ||||
| if __name__ == '__main__': | ||||
|     main() | ||||
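With this entry point in place, the lister can presumably be invoked along the lines of python -m eth_monitor.runnable.list -i <chain spec> --output <address> <cache dir>, where the cache directory is one previously populated by the eth-monitor cache filter; --output may be repeated for multiple addresses, and --renderer loads Python modules for rendering the transaction output.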
| @ -106,18 +106,6 @@ config.add(args.single, '_SINGLE', True) | ||||
| config.add(args.head, '_HEAD', True) | ||||
| logg.debug('loaded config:\{}'.format(config)) | ||||
| 
 | ||||
| block_offset = 0 | ||||
| if args.head: | ||||
|     block_offset = -1 | ||||
| else: | ||||
|     block_offset = args.offset | ||||
| 
 | ||||
| block_limit = 0 | ||||
| if args.until > 0: | ||||
|     if not args.head and args.until <= block_offset: | ||||
|         raise ValueError('sync termination block number must be later than offset ({} >= {})'.format(block_offset, args.until)) | ||||
|     block_limit = args.until | ||||
| 
 | ||||
| logg.debug('config loaded:\n{}'.format(config)) | ||||
| 
 | ||||
| chain_spec = ChainSpec.from_chain_str(args.i) | ||||
| @ -274,7 +262,8 @@ def setup_backend_resume(chain_spec, block_offset, block_limit, state_dir, callb | ||||
| 
 | ||||
| 
 | ||||
| def setup_backend_single(chain_spec, block_offset, block_limit, state_dir, callback, chain_interface, sync_offset=0, skip_history=False): | ||||
|     syncer_backend = FileBackend.initial(chain_spec, block_offset, start_block_height=sync_offset, base_dir=state_dir) | ||||
|     logg.debug('block limit {}'.format(block_limit)) | ||||
|     syncer_backend = FileBackend.initial(chain_spec, block_limit, start_block_height=sync_offset, base_dir=state_dir) | ||||
|     syncer = HistorySyncer(syncer_backend, chain_interface, block_callback=callback) | ||||
|     return [syncer] | ||||
| 
 | ||||
| @ -286,18 +275,30 @@ def setup_backend_head(chain_spec, block_offset, block_limit, state_dir, callbac | ||||
| 
 | ||||
| 
 | ||||
| def main(): | ||||
|     global block_limit | ||||
|     session_block_offset = 0 | ||||
|     if args.head: | ||||
|         session_block_offset = -1 | ||||
|     else: | ||||
|         session_block_offset = args.offset | ||||
| 
 | ||||
|     block_limit = 0 | ||||
|     if args.until > 0: | ||||
|         if not args.head and args.until <= session_block_offset: | ||||
|             raise ValueError('sync termination block number must be later than offset ({} >= {})'.format(session_block_offset, args.until)) | ||||
|         block_limit = args.until | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|     o = block_latest() | ||||
|     r = rpc.do(o) | ||||
|     block_offset = int(strip_0x(r), 16) + 1 | ||||
|     logg.info('network block height is {}'.format(block_offset)) | ||||
| 
 | ||||
|     if block_offset == -1: | ||||
|         block_offset = block_latest | ||||
|     if session_block_offset == -1: | ||||
|         session_block_offset = block_offset | ||||
|     elif not config.true('_KEEP_ALIVE'): | ||||
|         if block_limit == 0: | ||||
|             block_limit = block_latest | ||||
|             block_limit = block_offset | ||||
| 
 | ||||
|     address_rules = AddressRules(include_by_default=args.include_default) | ||||
|     address_rules = setup_address_file_rules( | ||||
|  | ||||
| @ -103,6 +103,17 @@ class FileStore: | ||||
|         f.close() | ||||
|         return r | ||||
| 
 | ||||
| 
 | ||||
|     def get_address_tx(self, address): | ||||
|         fp = self.address_dir.to_filepath(address) | ||||
|         tx_hashes = [] | ||||
|         for tx_hash in os.listdir(fp): | ||||
|             if tx_hash[0] == '.': | ||||
|                 continue | ||||
|             tx_hashes.append(tx_hash) | ||||
|         return tx_hashes | ||||
| 
 | ||||
| 
 | ||||
|     def __init__(self, chain_spec, cache_root=base_dir, address_rules=None): | ||||
|         self.cache_root = os.path.join( | ||||
|             cache_root, | ||||
|  | ||||
| @ -1,5 +1,5 @@ | ||||
| chainlib-eth~=0.0.28 | ||||
| chainlib-eth>=0.1.0b1,<=0.1.0 | ||||
| chainlib~=0.0.23 | ||||
| chainsyncer~=0.1.0 | ||||
| eth-erc20~=0.1.10 | ||||
| eth-erc20~=0.1.11 | ||||
| leveldir~=0.3.0 | ||||
|  | ||||