Add filter to file backend

This commit is contained in:
nolash 2021-04-10 00:11:10 +02:00
parent c2d3d243b0
commit 3b40f0e6f6
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
3 changed files with 130 additions and 3 deletions

4
.gitignore vendored
View File

@ -1,3 +1,7 @@
__pycache__
*.pyc
*.o
*.egg-info
gmon.out
build/
dist/

View File

@ -4,7 +4,6 @@ import uuid
import shutil
import logging
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger().getChild(__name__)
base_dir = '/var/lib'
@ -39,11 +38,16 @@ class SyncerFileBackend:
self.db_object_filter = None
self.chain_spec = chain_spec
self.filter_count = 0
self.filter = b'\x00'
self.filter_names = []
if self.object_id != None:
self.connect()
self.disconnect()
@staticmethod
def create_object(chain_spec, object_id=None, base_dir=base_dir):
if object_id == None:
@ -78,6 +82,16 @@ class SyncerFileBackend:
f.write(b'\x00' * 16)
f.close()
cursor_path = os.path.join(object_data_dir, 'filter')
f = open(cursor_path, 'wb')
f.write(b'\x00' * 9)
f.close()
filter_name_path = os.path.join(object_data_dir, 'filter_name')
f = open(filter_name_path, 'wb')
f.write(b'')
f.close()
return object_id
@ -103,6 +117,24 @@ class SyncerFileBackend:
self.block_height_cursor = int.from_bytes(b[:8], byteorder='big')
self.tx_index_cursor = int.from_bytes(b[8:], byteorder='big')
filter_path = os.path.join(self.object_data_dir, 'filter')
f = open(filter_path, 'rb')
b = f.read(8)
self.filter_count = int.from_bytes(b, byteorder='big')
filter_count_bytes = int((self.filter_count - 1) / 8 + 1)
if filter_count_bytes > 0:
self.filter = f.read(filter_count_bytes)
f.close()
filter_name_path = filter_path + '_name'
f = open(filter_name_path, 'r')
while True:
s = f.readline().rstrip()
if len(s) == 0:
break
self.filter_names.append(s)
f.close()
def connect(self):
object_path = os.path.join(self.object_data_dir, 'object_id')
@ -127,9 +159,17 @@ class SyncerFileBackend:
return (self.block_height_cursor, self.tx_index_cursor)
def set(self, block_height, tx_index):
# NOTE(review): this span looks like a collapsed diff hunk — the
# 'return' line below appears to be the removed old line and the
# bare call on the next line its replacement; confirm against the
# repository before editing
return self.__set(block_height, tx_index, 'cursor')
self.__set(block_height, tx_index, 'cursor')
# persist the filter bit field to the 'filter' state file, after
# the 8-byte filter count header
cursor_path = os.path.join(self.object_data_dir, 'filter')
f = open(cursor_path, 'r+b')
f.seek(8)
l = len(self.filter)
c = 0
# presumably intended to retry partial writes; as written each pass
# rewrites the whole field rather than the remaining tail — verify
while c < l:
c += f.write(self.filter)
f.close()
def __set(self, block_height, tx_index, category):
@ -205,3 +245,47 @@ class SyncerFileBackend:
entries = SyncerFileBackend.__sorted_entries(chain_spec, base_dir=base_dir)
return entries[len(entries)-1]
# n is the zero-based index into the filter bit field
def complete_filter(self, n, base_dir=base_dir):
    """Mark filter bit n as completed, both in memory and in the
    'filter' state file.

    Bit 0 is the most significant bit of the first field byte. On
    disk the bit field starts at offset 8, immediately after the
    8-byte big-endian filter count header.

    :param n: zero-based index of the filter bit to set
    :type n: int
    :param base_dir: unused; retained for interface compatibility
    :type base_dir: str
    :raises IndexError: if n is out of range for the registered filter count
    """
    if self.filter_count <= n:
        # fixed message typo: "out of ranger" -> "out of range"
        raise IndexError('index {} out of range for filter size {}'.format(n, self.filter_count))
    byte_pos = n // 8
    bit_pos = n % 8
    byts = bytearray(self.filter)
    byts[byte_pos] |= (0x80 >> bit_pos)
    # keep the in-memory field as immutable bytes, matching what
    # load() produces when reading the state back
    self.filter = bytes(byts)
    filter_path = os.path.join(self.object_data_dir, 'filter')
    # the original seeked to 8 + byte_pos but then wrote the WHOLE
    # field, shifting it on disk whenever byte_pos > 0; write the
    # full field at its fixed offset instead
    with open(filter_path, 'r+b') as f:
        f.seek(8)
        f.write(self.filter)
# flushes filter state to disk; members changed manually in the
# struct will be overwritten along with the registration
def register_filter(self, name):
    """Register a new named filter: grow the bit field when the
    current field is full, append the name to the 'filter_name'
    state file, and persist the updated count header.

    :param name: filter name, appended (newline-terminated) to the
        'filter_name' file
    :type name: str
    """
    filter_path = os.path.join(self.object_data_dir, 'filter')
    # one byte holds 8 filter bits, so growth is needed only when
    # the count is a positive multiple of 8 BEFORE incrementing.
    # The original grew at (count + 1) % 8 == 0 — one registration
    # early — leaving the on-disk field one byte longer than
    # load() computes from the count via int((count - 1) / 8 + 1).
    if self.filter_count > 0 and self.filter_count % 8 == 0:
        self.filter += b'\x00'
        with open(filter_path, 'a+b') as f:
            f.write(b'\x00')
    filter_name_path = filter_path + '_name'
    with open(filter_name_path, 'a') as f:
        f.write(name + '\n')
    self.filter_count += 1
    # the 8-byte big-endian count header sits at the file start
    with open(filter_path, 'r+b') as f:
        f.write(self.filter_count.to_bytes(8, byteorder='big'))

View File

@ -1,4 +1,5 @@
# standard imports
import logging
import uuid
import os
import unittest
@ -10,6 +11,9 @@ from chainlib.chain import ChainSpec
# local imports
from chainsyncer.backend_file import SyncerFileBackend
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger().getChild(__name__)
script_dir = os.path.dirname(__file__)
tmp_test_dir = os.path.join(script_dir, 'testdata', 'tmp')
chainsyncer_test_dir = os.path.join(tmp_test_dir, 'chainsyncer')
@ -21,6 +25,7 @@ class TestFile(unittest.TestCase):
def setUp(self):
self.chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
self.uu = SyncerFileBackend.create_object(self.chain_spec, None, base_dir=tmp_test_dir)
logg.debug('made uu {} for {}'.format(self.uu, self.chain_spec))
self.o = SyncerFileBackend(self.chain_spec, self.uu, base_dir=tmp_test_dir)
@ -30,6 +35,7 @@ class TestFile(unittest.TestCase):
shutil.rmtree(chainsyncer_test_dir)
@unittest.skip('foo')
def test_set(self):
self.o.set(42, 13)
@ -41,6 +47,7 @@ class TestFile(unittest.TestCase):
self.assertEqual(state[1], 13)
@unittest.skip('foo')
def test_initial(self):
local_uu = SyncerFileBackend.initial(self.chain_spec, 1337, start_block_height=666, base_dir=tmp_test_dir)
@ -55,6 +62,7 @@ class TestFile(unittest.TestCase):
self.assertEqual(pair[1], 0)
@unittest.skip('foo')
def test_resume(self):
for i in range(1, 10):
local_uu = SyncerFileBackend.initial(self.chain_spec, 666, start_block_height=i, base_dir=tmp_test_dir)
@ -69,6 +77,7 @@ class TestFile(unittest.TestCase):
last = o.block_height_offset
@unittest.skip('foo')
def test_first(self):
for i in range(1, 10):
local_uu = SyncerFileBackend.initial(self.chain_spec, 666, start_block_height=i, base_dir=tmp_test_dir)
@ -78,5 +87,35 @@ class TestFile(unittest.TestCase):
self.assertEqual(first_entry.block_height_offset, 9)
# exercises filter registration, bit completion, and state round-trip
# through a fresh SyncerFileBackend instance
def test_filter(self):
# a fresh backend starts with a single zeroed bit-field byte
self.assertEqual(len(self.o.filter), 1)
self.o.register_filter('foo')
self.o.register_filter('bar')
# reload from disk and verify persisted count, names, and field size
o = SyncerFileBackend(self.chain_spec, self.uu, base_dir=tmp_test_dir)
self.assertEqual(o.filter_count, 2)
self.assertEqual(o.filter_names, ['foo', 'bar'])
self.assertEqual(len(o.filter), 1)
# bit 0 is the most significant bit: completing bit 1 yields 0x40
self.o.complete_filter(1)
self.assertEqual(self.o.filter, b'\x40')
self.o.complete_filter(0)
self.assertEqual(self.o.filter, b'\xc0')
# completed bits must survive a reload
o = SyncerFileBackend(self.chain_spec, self.uu, base_dir=tmp_test_dir)
self.assertEqual(o.filter, b'\xc0')
# completing an unregistered index must fail...
with self.assertRaises(IndexError):
self.o.complete_filter(2)
# ...until a third filter is registered
self.o.register_filter('baz')
self.o.complete_filter(2)
# run the test suite when the module is executed directly
if __name__ == '__main__':
unittest.main()