Add first, resume methods for file backend

This commit is contained in:
nolash 2021-04-09 22:14:58 +02:00
parent 3aa4e5e1a4
commit c2d3d243b0
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
2 changed files with 72 additions and 6 deletions

View File

@ -7,16 +7,22 @@ import logging
# NOTE(review): calling basicConfig at import time forces DEBUG output on the
# root logger for every consumer of this module — consider moving this to the
# application entry point. Confirm it is intentional before changing.
logging.basicConfig(level=logging.DEBUG)
# Module logger, child of the root logger.
logg = logging.getLogger().getChild(__name__)
# Default root under which all chainsyncer state directories live.
base_dir = '/var/lib'


def chain_dir_for(chain_spec, base_dir=base_dir):
    """Return the state directory for a chain: <base_dir>/chainsyncer/<chain spec with ':' as '/'>."""
    chain_subpath = str(chain_spec).replace(':', '/')
    return os.path.join(base_dir, 'chainsyncer', chain_subpath)


def data_dir_for(chain_spec, object_id, base_dir=base_dir):
    """Return the state directory for one syncer object under the chain directory."""
    return os.path.join(chain_dir_for(chain_spec, base_dir=base_dir), object_id)
class SyncerFileBackend:
def __init__(self, chain_spec, object_id=None, base_dir='/var/lib'):
def __init__(self, chain_spec, object_id=None, base_dir=base_dir):
self.object_data_dir = data_dir_for(chain_spec, object_id, base_dir=base_dir)
self.block_height_offset = 0
@ -39,7 +45,7 @@ class SyncerFileBackend:
@staticmethod
def create_object(chain_spec, object_id=None, base_dir='/var/lib'):
def create_object(chain_spec, object_id=None, base_dir=base_dir):
if object_id == None:
object_id = str(uuid.uuid4())
@ -142,7 +148,7 @@ class SyncerFileBackend:
@staticmethod
def initial(chain_spec, target_block_height, start_block_height=0, base_dir='/var/lib'):
def initial(chain_spec, target_block_height, start_block_height=0, base_dir=base_dir):
if start_block_height >= target_block_height:
raise ValueError('start block height must be lower than target block height')
@ -161,3 +167,41 @@ class SyncerFileBackend:
def start(self):
    """Return the syncer start state as ((block_height_offset, tx_index_offset), None).

    NOTE(review): the meaning of the second tuple element (always None here)
    is not visible in this chunk — presumably a flags/filter slot; confirm.
    """
    offsets = (self.block_height_offset, self.tx_index_offset)
    return (offsets, None)
@staticmethod
def __sorted_entries(chain_spec, base_dir=base_dir):
    """Load every syncer entry found under the chain directory, sorted by
    ascending block height offset.

    :param chain_spec: chain spec whose state directory is scanned
    :param base_dir: state directory root
    :returns: list of SyncerFileBackend, lowest block_height_offset first
    :raises FileNotFoundError: if the chain directory does not exist
    """
    chain_dir = chain_dir_for(chain_spec, base_dir=base_dir)
    entries = []
    for v in os.listdir(chain_dir):
        d = os.path.realpath(os.path.join(chain_dir, v))
        # context manager guarantees the handle is closed even if read fails
        with open(os.path.join(d, 'object_id')) as f:
            object_id = f.read()
        logg.debug('found syncer entry {} in {}'.format(object_id, d))
        o = SyncerFileBackend(chain_spec, object_id, base_dir=base_dir)
        entries.append(o)
    # Sort on a key instead of building a dict keyed by offset: the dict
    # silently dropped all but one of any entries sharing the same
    # block_height_offset.
    entries.sort(key=lambda o: o.block_height_offset)
    return entries
@staticmethod
def resume(chain_spec, base_dir=base_dir):
    """Return all stored syncer entries for the chain, ordered by ascending
    block height offset, so syncing can be resumed from each saved state."""
    entries = SyncerFileBackend.__sorted_entries(chain_spec, base_dir=base_dir)
    return entries
@staticmethod
def first(chain_spec, base_dir=base_dir):
    """Return the stored syncer entry with the HIGHEST block height offset.

    NOTE(review): despite the name, this returns the last element of the
    ascending-sorted entries (the unit test expects offset 9 out of 1..9) —
    confirm the name matches the intended semantics.

    :raises IndexError: if no entries exist for the chain
    """
    entries = SyncerFileBackend.__sorted_entries(chain_spec, base_dir=base_dir)
    # entries[-1] is the idiomatic spelling of entries[len(entries)-1] and
    # raises the same IndexError when the list is empty
    return entries[-1]

View File

@ -55,6 +55,28 @@ class TestFile(unittest.TestCase):
self.assertEqual(pair[1], 0)
def test_resume(self):
    """resume() must return all stored entries in strictly ascending
    block height order.

    NOTE(review): 9 syncers are created here but 10 entries are expected —
    presumably setUp creates one more; confirm against the fixture.
    """
    for height in range(1, 10):
        SyncerFileBackend.initial(self.chain_spec, 666, start_block_height=height, base_dir=tmp_test_dir)
    entries = SyncerFileBackend.resume(self.chain_spec, base_dir=tmp_test_dir)
    self.assertEqual(len(entries), 10)
    previous_offset = -1
    for entry in entries:
        self.assertLess(previous_offset, entry.block_height_offset)
        previous_offset = entry.block_height_offset
def test_first(self):
    """first() must return the entry with the highest block height offset
    (9 is the largest start height created below)."""
    for height in range(1, 10):
        SyncerFileBackend.initial(self.chain_spec, 666, start_block_height=height, base_dir=tmp_test_dir)
    newest_entry = SyncerFileBackend.first(self.chain_spec, base_dir=tmp_test_dir)
    self.assertEqual(newest_entry.block_height_offset, 9)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()