diff --git a/apps/cic-cache/cic_cache/api.py b/apps/cic-cache/cic_cache/api.py
index 0340e65c..74083ba5 100644
--- a/apps/cic-cache/cic_cache/api.py
+++ b/apps/cic-cache/cic_cache/api.py
@@ -55,15 +55,37 @@ class Api:
                     queue=callback_queue,
                     )       
 
-    def list(self, offset, limit, address=None):
+    def list(self, offset=0, limit=100, address=None, oldest=False):
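+        """Creates a task to fetch transactions from the cache, returned as bloom filters.
+
+        :param offset: Offset in data set to return transactions from
+        :param limit: Max number of transactions to retrieve
+        :param address: If set, only include transactions where the address is sender or recipient
+        :param oldest: Return results in ascending (oldest-first) block order
+        :returns: Celery AsyncResult for the queued task
+        """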
         s = celery.signature(
         'cic_cache.tasks.tx.tx_filter',
         [
-            0,
-            100,
+            offset,
+            limit,
             address,
+            oldest,
             ],
-            queue=None
+            queue=self.queue,
+        )
+        if self.callback_param != None:
+            s.link(self.callback_success).on_error(self.callback_error)
+
+        t = s.apply_async()
+
+        return t
+
+
+    def list_content(self, offset=0, limit=100, address=None, block_offset=None, block_limit=None, oldest=False):
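+        """Creates a task to fetch full transaction content from the cache within the given boundaries.
+
+        :param offset: Offset in data set to return transactions from
+        :param limit: Max number of transactions to retrieve
+        :param address: If set, only include transactions where the address is sender or recipient
+        :param block_offset: First block to include in search
+        :param block_limit: Last block to include in search
+        :param oldest: Return results in ascending (oldest-first) block order
+        :returns: Celery AsyncResult for the queued task
+        """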
+        s = celery.signature(
+        'cic_cache.tasks.tx.tx_filter_content',
+        [
+            offset,
+            limit,
+            address,
+            block_offset,
+            block_limit,
+            oldest,
+            ],
+            queue=self.queue,
         )
         if self.callback_param != None:
             s.link(self.callback_success).on_error(self.callback_error)
diff --git a/apps/cic-cache/cic_cache/cache.py b/apps/cic-cache/cic_cache/cache.py
index 7c4d1040..10e8c458 100644
--- a/apps/cic-cache/cic_cache/cache.py
+++ b/apps/cic-cache/cic_cache/cache.py
@@ -10,12 +10,16 @@ from cic_cache.db.list import (
         list_transactions_mined,
         list_transactions_account_mined,
         list_transactions_mined_with_data,
+        list_transactions_mined_with_data_index,
+        list_transactions_account_mined_with_data_index,
+        list_transactions_account_mined_with_data,
         )
 
 logg = logging.getLogger()
 
 
 DEFAULT_FILTER_SIZE = 8192 * 8
+DEFAULT_LIMIT = 100
 
 class Cache:
 
@@ -32,7 +36,7 @@ class BloomCache(Cache):
         return n
 
 
-    def load_transactions(self, offset, limit):
+    def load_transactions(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
         """Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.
 
         Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
@@ -49,7 +53,7 @@ class BloomCache(Cache):
         :return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
         :rtype: tuple
         """
-        rows = list_transactions_mined(self.session, offset, limit) 
+        rows = list_transactions_mined(self.session, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
 
         f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
         f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
@@ -58,7 +62,12 @@ class BloomCache(Cache):
         for r in rows:
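+            # rows are ordered newest-first by default and oldest-first when oldest=True;
+            # the first row seeds both bounds, subsequent rows update the opposite bound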
             if highest_block == -1:
                 highest_block = r[0]
-            lowest_block = r[0]
+                lowest_block = r[0]
+            else:
+                if oldest:
+                    highest_block = r[0]
+                else:
+                    lowest_block = r[0]
             block = r[0].to_bytes(4, byteorder='big')
             tx = r[1].to_bytes(4, byteorder='big')
             f_block.add(block)
@@ -67,7 +76,7 @@ class BloomCache(Cache):
         return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)
 
 
-    def load_transactions_account(self, address, offset, limit):
+    def load_transactions_account(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
         """Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.
 
         :param address: Address to retrieve transactions for.
@@ -79,7 +88,7 @@ class BloomCache(Cache):
         :return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
         :rtype: tuple
         """
-        rows = list_transactions_account_mined(self.session, address, offset, limit) 
+        rows = list_transactions_account_mined(self.session, address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest) 
 
         f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
         f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
@@ -88,7 +97,12 @@ class BloomCache(Cache):
         for r in rows:
             if highest_block == -1:
                 highest_block = r[0]
-            lowest_block = r[0]
+                lowest_block = r[0]
+            else:
+                if oldest:
+                    highest_block = r[0]
+                else:
+                    lowest_block = r[0]
             block = r[0].to_bytes(4, byteorder='big')
             tx = r[1].to_bytes(4, byteorder='big')
             f_block.add(block)
@@ -99,8 +113,21 @@ class BloomCache(Cache):
 
 class DataCache(Cache):
 
-    def load_transactions_with_data(self, offset, end):
-        rows = list_transactions_mined_with_data(self.session, offset, end) 
+    def load_transactions_with_data(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
+        if limit == 0:
+            limit = DEFAULT_LIMIT
+        rows = list_transactions_mined_with_data(self.session, offset, limit, block_offset, block_limit, oldest=oldest) 
+        return self.__process_rows(rows, oldest)
+
+
+    def load_transactions_account_with_data(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
+        if limit == 0:
+            limit = DEFAULT_LIMIT
+        rows = list_transactions_account_mined_with_data(self.session, address, offset, limit, block_offset, block_limit, oldest=oldest) 
+        return self.__process_rows(rows, oldest)
+
+
+    def __process_rows(self, rows, oldest):
         tx_cache = []
         highest_block = -1;
         lowest_block = -1;
@@ -108,7 +135,12 @@ class DataCache(Cache):
         for r in rows:
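+            # as in BloomCache: the first row seeds both block bounds, and later rows
+            # update lowest_block (default) or highest_block (oldest=True)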
             if highest_block == -1:
                 highest_block = r['block_number']
-            lowest_block = r['block_number']
+                lowest_block = r['block_number']
+            else:
+                if oldest:
+                    highest_block = r['block_number']
+                else:
+                    lowest_block = r['block_number']
             tx_type = 'unknown'
 
             if r['value'] != None:
diff --git a/apps/cic-cache/cic_cache/cli/arg.py b/apps/cic-cache/cic_cache/cli/arg.py
index 43428cb1..2d4e6e8a 100644
--- a/apps/cic-cache/cic_cache/cli/arg.py
+++ b/apps/cic-cache/cic_cache/cli/arg.py
@@ -12,7 +12,7 @@ class ArgumentParser(BaseArgumentParser):
 
     def process_local_flags(self, local_arg_flags):
         if local_arg_flags & CICFlag.CELERY:
-            self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-eth', help='Task queue')
+            self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-cache', help='Task queue')
         if local_arg_flags & CICFlag.SYNCER:
             self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
             self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
diff --git a/apps/cic-cache/cic_cache/data/config/celery.ini b/apps/cic-cache/cic_cache/data/config/celery.ini
index f2ad10ab..a799cb4a 100644
--- a/apps/cic-cache/cic_cache/data/config/celery.ini
+++ b/apps/cic-cache/cic_cache/data/config/celery.ini
@@ -1,5 +1,5 @@
 [celery]
 broker_url = redis://localhost:6379
 result_url = 
-queue = cic-eth
+queue = cic-cache
 debug = 0
diff --git a/apps/cic-cache/cic_cache/db/list.py b/apps/cic-cache/cic_cache/db/list.py
index 62e0a2e0..4052471e 100644
--- a/apps/cic-cache/cic_cache/db/list.py
+++ b/apps/cic-cache/cic_cache/db/list.py
@@ -13,6 +13,9 @@ def list_transactions_mined(
         session,
         offset,
         limit,
+        block_offset,
+        block_limit,
+        oldest=False,
         ):
     """Executes db query to return all confirmed transactions according to the specified offset and limit.
 
@@ -23,15 +26,62 @@ def list_transactions_mined(
     :result: Result set
     :rtype: SQLAlchemy.ResultProxy
     """
-    s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
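+    # block_offset and block_limit are inclusive block number bounds; limit and
+    # offset paginate the ordered result set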
+    if block_offset:
+        if block_limit:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT block_number, tx_index FROM tx ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
     r = session.execute(s)
     return r
 
 
 def list_transactions_mined_with_data(
+        session,
+        offset,
+        limit,
+        block_offset,
+        block_limit,
+        oldest=False,
+        ):
+    """Executes db query to return all confirmed transactions according to the specified offset and limit.
+
+    :param block_offset: First block to include in search
+    :type block_offset: int
+    :param block_limit: Last block to include in search
+    :type block_limit: int
+    :result: Result set
+    :rtype: SQLAlchemy.ResultProxy
+    """
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
+
+
+    r = session.execute(s)
+    return r
+
+
+def list_transactions_mined_with_data_index(
         session,
         offset,
         end,
+        block_offset,
+        block_limit,
+        oldest=False,
         ):
     """Executes db query to return all confirmed transactions according to the specified offset and limit.
 
@@ -42,7 +92,87 @@ def list_transactions_mined_with_data(
     :result: Result set
     :rtype: SQLAlchemy.ResultProxy
     """
-    s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number ASC, tx_index ASC".format(offset, end)
+
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, offset, end)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, offset, end)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, offset, end)
+
+    r = session.execute(s)
+    return r
+
+
+def list_transactions_account_mined_with_data_index(
+        session,
+        address,
+        offset,
+        limit,
+        block_offset,
+        block_limit,
+        oldest=False,
+        ):
+    """Executes db query to return all confirmed transactions according to the specified offset and limit, filtered by address
+
+    :param offset: Offset in data set to return transactions from
+    :type offset: int
+    :param limit: Max number of transactions to retrieve
+    :type limit: int
+    :result: Result set
+    :rtype: SQLAlchemy.ResultProxy
+    """
+
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
+
+    r = session.execute(s)
+    return r
+
+def list_transactions_account_mined_with_data(
+        session,
+        address,
+        offset,
+        limit,
+        block_offset,
+        block_limit,
+        oldest=False,
+        ):
+    """Executes db query to return all confirmed transactions according to the specified offset and limit.
+
+    :param block_offset: First block to include in search
+    :type block_offset: int
+    :param block_limit: Last block to include in search
+    :type block_limit: int
+    :result: Result set
+    :rtype: SQLAlchemy.ResultProxy
+    """
+
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
 
     r = session.execute(s)
     return r
@@ -53,6 +183,9 @@ def list_transactions_account_mined(
         address,
         offset,
         limit,
+        block_offset,
+        block_limit,
+        oldest=False,
         ):
     """Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.
 
@@ -65,7 +198,20 @@ def list_transactions_account_mined(
     :result: Result set
     :rtype: SQLAlchemy.ResultProxy
     """
-    s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
+
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
+
+    else:
+        s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
+
     r = session.execute(s)
     return r
 
diff --git a/apps/cic-cache/cic_cache/runnable/daemons/query.py b/apps/cic-cache/cic_cache/runnable/daemons/query.py
index a698a692..d7edf6ab 100644
--- a/apps/cic-cache/cic_cache/runnable/daemons/query.py
+++ b/apps/cic-cache/cic_cache/runnable/daemons/query.py
@@ -91,13 +91,14 @@ def process_transactions_all_data(session, env):
     if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
         return None
 
-    offset = r[1]
-    end = r[2]
+    logg.debug('got data request {}'.format(env))
+    block_offset = r[1]
+    block_end = r[2]
     if int(r[2]) < int(r[1]):
         raise ValueError('cart before the horse, dude')
 
     c = DataCache(session)
-    (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, end)
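+    # a limit of 0 makes DataCache fall back to its DEFAULT_LIMIT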
+    (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable
 
     for r in tx_cache:
         r['date_block'] = r['date_block'].timestamp()
diff --git a/apps/cic-cache/cic_cache/tasks/tx.py b/apps/cic-cache/cic_cache/tasks/tx.py
index 0e2f7435..7524fe6f 100644
--- a/apps/cic-cache/cic_cache/tasks/tx.py
+++ b/apps/cic-cache/cic_cache/tasks/tx.py
@@ -2,14 +2,17 @@
 import celery
 
 # local imports
-from cic_cache.cache import BloomCache
+from cic_cache.cache import (
+        BloomCache,
+        DataCache,
+        )
 from cic_cache.db.models.base import SessionBase
 
 celery_app = celery.current_app
 
 
 @celery_app.task(bind=True)
-def tx_filter(self, offset, limit, address=None, encoding='hex'):
+def tx_filter(self, offset, limit, address=None, oldest=False, encoding='hex'):
     queue = self.request.delivery_info.get('routing_key')
 
     session = SessionBase.create_session()
@@ -17,9 +20,9 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
     c = BloomCache(session)
     b = None
     if address == None:
-        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
+        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit, oldest=oldest)
     else:
-        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
+        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit, oldest=oldest)
 
     session.close()
 
@@ -35,4 +38,17 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
     return o
 
 
+@celery_app.task(bind=True)
+def tx_filter_content(self, offset, limit, address=None, block_offset=None, block_limit=None, oldest=False, encoding='hex'):
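+    # like tx_filter, but returns full transaction records instead of bloom filters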
+    session = SessionBase.create_session()
 
+    c = DataCache(session)
+    b = None
+    if address == None:
+        (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
+    else:
+        (lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data(address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
+
+    session.close()
+
+    return (lowest_block, highest_block, tx_cache,)
diff --git a/apps/cic-cache/cic_cache/version.py b/apps/cic-cache/cic_cache/version.py
index 338bb831..b5293a59 100644
--- a/apps/cic-cache/cic_cache/version.py
+++ b/apps/cic-cache/cic_cache/version.py
@@ -4,8 +4,8 @@ import semver
 version = (
         0,
         2,
-        0,
-        'alpha.2',
+        1,
+        'alpha.1',
         )
 
 version_object = semver.VersionInfo(
diff --git a/apps/cic-cache/requirements.txt b/apps/cic-cache/requirements.txt
index e01c9bbe..40e9bdeb 100644
--- a/apps/cic-cache/requirements.txt
+++ b/apps/cic-cache/requirements.txt
@@ -9,6 +9,7 @@ psycopg2==2.8.6
 celery==4.4.7
 redis==3.5.3
 chainsyncer[sql]>=0.0.6a1,<0.1.0
-erc20-faucet~=0.2.4a1
-chainlib>=0.0.7a1,<0.1.0
+erc20-faucet>=0.2.4a2,<0.3.0
+chainlib-eth>=0.0.7a3,<0.1.0
+chainlib>=0.0.7a3,<0.1.0
 eth-address-index>=0.1.4a1,<0.2.0
diff --git a/apps/cic-cache/setup.cfg b/apps/cic-cache/setup.cfg
index b6de6a9c..f8393c92 100644
--- a/apps/cic-cache/setup.cfg
+++ b/apps/cic-cache/setup.cfg
@@ -41,3 +41,4 @@ console_scripts =
 	cic-cache-trackerd = cic_cache.runnable.daemons.tracker:main
 	cic-cache-serverd = cic_cache.runnable.daemons.server:main
 	cic-cache-taskerd = cic_cache.runnable.daemons.tasker:main
+	cic-cache-list = cic_cache.runnable.list:main
diff --git a/apps/cic-cache/tests/cli/test_cli_args.py b/apps/cic-cache/tests/cli/test_cli_args.py
new file mode 100644
index 00000000..6e0c8a26
--- /dev/null
+++ b/apps/cic-cache/tests/cli/test_cli_args.py
@@ -0,0 +1,40 @@
+# standard imports
+import os
+
+# external imports
+import chainlib.cli 
+
+# local imports
+import cic_cache.cli
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+config_dir = os.path.join(script_dir, '..', 'testdata', 'config')
+
+
+def test_argumentparser_to_config():
+
+    argparser = cic_cache.cli.ArgumentParser()
+    
+    local_flags = 0xffff
+    argparser.process_local_flags(local_flags) 
+    argparser.add_argument('--foo', type=str)
+    args = argparser.parse_args([
+        '-q', 'baz',
+        '--offset', '13',
+        '--no-history',
+        '-r','0xdeadbeef',
+        '-vv',
+        '--foo', 'bar',
+        ])
+
+    extra_args = {
+            'foo': '_BARBARBAR',
+            }
+    config = cic_cache.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args, base_config_dir=config_dir)
+
+    assert config.get('_BARBARBAR') == 'bar'
+    assert config.get('CELERY_QUEUE') == 'baz'
+    assert config.get('SYNCER_NO_HISTORY') == True
+    assert config.get('SYNCER_OFFSET') == 13
+    assert config.get('CIC_REGISTRY_ADDRESS') == '0xdeadbeef'
+
diff --git a/apps/cic-cache/tests/cli/test_cli_celery.py b/apps/cic-cache/tests/cli/test_cli_celery.py
new file mode 100644
index 00000000..81d975af
--- /dev/null
+++ b/apps/cic-cache/tests/cli/test_cli_celery.py
@@ -0,0 +1,17 @@
+# standard imports
+import tempfile
+
+# local imports
+import cic_cache.cli
+
+
+def test_cli_celery():
+    cf = tempfile.mkdtemp()
+
+    config = {
+            'CELERY_RESULT_URL': 'filesystem://' + cf,
+            }
+    cic_cache.cli.CeleryApp.from_config(config)
+
+    config['CELERY_BROKER_URL'] = 'filesystem://' + cf
+    cic_cache.cli.CeleryApp.from_config(config)
diff --git a/apps/cic-cache/tests/cli/test_cli_chain.py b/apps/cic-cache/tests/cli/test_cli_chain.py
new file mode 100644
index 00000000..0cf9d5da
--- /dev/null
+++ b/apps/cic-cache/tests/cli/test_cli_chain.py
@@ -0,0 +1,68 @@
+# standard imports
+import os
+
+# external imports
+import pytest
+from chainlib.eth.gas import (
+        Gas,
+        RPCGasOracle,
+        )
+from chainlib.eth.nonce import RPCNonceOracle
+from chainlib.eth.block import (
+        block_latest,
+        Block,
+        )
+from chainlib.eth.pytest.fixtures_chain import default_chain_spec
+from chainlib.eth.pytest.fixtures_ethtester import *
+from cic_eth_registry.pytest.fixtures_contracts import *
+from hexathon import add_0x
+
+# local imports
+import cic_cache.cli
+
+
+@pytest.mark.xfail()
+def test_cli_rpc(
+        eth_rpc,
+        eth_signer,
+        default_chain_spec,
+        ):
+    config = {
+        'CHAIN_SPEC': str(default_chain_spec),
+        'RPC_HTTP_PROVIDER': 'http://localhost:8545',
+            }
+    rpc = cic_cache.cli.RPC.from_config(config, default_label='foo') 
+    conn = rpc.get_by_label('foo')
+    #o = block_latest()
+    #conn.do(o)
+
+
+def test_cli_chain(
+        default_chain_spec,
+        eth_rpc,
+        eth_signer,
+        contract_roles,
+        ):
+    ifc = cic_cache.cli.EthChainInterface()
+
+    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
+    gas_oracle = RPCGasOracle(conn=eth_rpc)
+    c = Gas(default_chain_spec, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, signer=eth_signer)
+    recipient = add_0x(os.urandom(20).hex())
+    (tx_hash, o) = c.create(contract_roles['CONTRACT_DEPLOYER'], recipient, 1024)
+    r = eth_rpc.do(o)
+
+    o = ifc.tx_receipt(r)
+    r = eth_rpc.do(o)
+    assert r['status'] == 1
+
+    o = ifc.block_by_number(1)
+    block_src = eth_rpc.do(o)
+    block = ifc.block_from_src(block_src)
+    assert block.number == 1
+
+    with pytest.raises(KeyError):
+        assert block_src['gasUsed'] == 21000
+        assert block_src['gas_used'] == 21000
+
+    block_src = ifc.src_normalize(block_src)
+    assert block_src['gasUsed'] == 21000
+    assert block_src['gas_used'] == 21000
+
diff --git a/apps/cic-cache/tests/conftest.py b/apps/cic-cache/tests/conftest.py
index 61db702f..478c261f 100644
--- a/apps/cic-cache/tests/conftest.py
+++ b/apps/cic-cache/tests/conftest.py
@@ -64,7 +64,6 @@ def txs(
         dt.timestamp(),
             )
 
-
     tx_number = 42
     tx_hash_second = '0x' + os.urandom(32).hex()
     tx_signed_second = '0x' + os.urandom(128).hex()
@@ -93,6 +92,44 @@ def txs(
             ]
 
 
+@pytest.fixture(scope='function')
+def more_txs(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        txs,
+        ):
+
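+    # adds one newer transaction (at list_defaults['block'] + 2) on top of the txs
+    # fixture, returning its hash prepended to the existing list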
+    session = init_database
+
+    tx_number = 666
+    tx_hash = '0x' + os.urandom(32).hex()
+    tx_signed = '0x' + os.urandom(128).hex()
+    nonce = 3
+
+    dt = datetime.datetime.utcnow()
+    dt += datetime.timedelta(hours=1)
+    db.add_transaction(
+        session,
+        tx_hash,
+        list_defaults['block']+2,
+        tx_number,
+        list_actors['alice'],
+        list_actors['diane'],
+        list_tokens['bar'],
+        list_tokens['bar'],
+        2048,
+        4096,
+        False,
+        dt.timestamp(),
+        )
+
+    session.commit()
+
+    return [tx_hash] + txs
+
+
 @pytest.fixture(scope='function')
 def tag_txs(
         init_database,
diff --git a/apps/cic-cache/tests/test_cache.py b/apps/cic-cache/tests/test_cache.py
index cdd038c1..a5fdcc0d 100644
--- a/apps/cic-cache/tests/test_cache.py
+++ b/apps/cic-cache/tests/test_cache.py
@@ -8,6 +8,7 @@ import json
 import pytest
 
 # local imports
+from cic_cache import db
 from cic_cache import BloomCache
 from cic_cache.cache import DataCache
 
@@ -18,7 +19,6 @@ def test_cache(
         init_database,
         list_defaults,
         list_actors,
-        list_tokens,
         txs,
         ):
 
@@ -37,9 +37,6 @@ def test_cache(
 
 def test_cache_data(
         init_database,
-        list_defaults,
-        list_actors,
-        list_tokens,
         txs,
         tag_txs,
         ):
@@ -47,10 +44,209 @@ def test_cache_data(
     session = init_database
 
     c = DataCache(session)
-    b = c.load_transactions_with_data(410000, 420000)
+    b = c.load_transactions_with_data(0, 3)
 
     assert len(b[2]) == 2
-    assert b[2][0]['tx_hash'] == txs[1]
-    assert b[2][1]['tx_type'] == 'unknown'
-    assert b[2][0]['tx_type'] == 'test.taag'
+    assert b[2][0]['tx_hash'] == txs[0]
+    assert b[2][0]['tx_type'] == 'unknown'
+    assert b[2][1]['tx_type'] == 'test.taag'
+
+
+def test_cache_ranges(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        more_txs,
+        ):
+
+    session = init_database
+       
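+    # block heights of the three transactions set up by the txs and more_txs fixtures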
+    oldest = list_defaults['block'] - 1
+    mid = list_defaults['block']
+    newest = list_defaults['block'] + 2
+
+    c = BloomCache(session)
+    b = c.load_transactions(0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    b = c.load_transactions(1, 2)
+    assert b[0] == oldest
+    assert b[1] == mid
+
+    b = c.load_transactions(0, 2)
+    assert b[0] == mid
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 1)
+    assert b[0] == newest
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 100, oldest=True)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == mid
+
+    b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
+    assert b[0] == oldest
+    assert b[1] == mid
+
+    # now check when supplying account
+    b = c.load_transactions_account(list_actors['alice'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    b = c.load_transactions_account(list_actors['bob'], 0, 100)
+    assert b[0] == mid
+    assert b[1] == mid
+
+    b = c.load_transactions_account(list_actors['diane'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    # add block filter to the mix
+    b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
     
+    b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == mid
+
+    b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == oldest
+
+
+def test_cache_ranges_data( 
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        more_txs,
+        ):
+
+    session = init_database
+       
+    oldest = list_defaults['block'] - 1
+    mid = list_defaults['block']
+    newest = list_defaults['block'] + 2
+
+    c = DataCache(session)
+
+    b = c.load_transactions_with_data(0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 3
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][2]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_with_data(1, 2)
+    assert b[0] == oldest
+    assert b[1] == mid
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[1]
+    assert b[2][1]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_with_data(0, 2)
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_with_data(0, 1)
+    assert b[0] == newest
+    assert b[1] == newest
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[0]
+
+    b = c.load_transactions_with_data(0, 100, oldest=True)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 3
+    assert b[2][0]['tx_hash'] == more_txs[2]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+    assert b[2][2]['tx_hash'] == more_txs[0]
+
+    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == mid
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[1]
+    assert b[2][1]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
+    assert b[0] == oldest
+    assert b[1] == mid
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[2]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    # now check when supplying account
+    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 3
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+    assert b[2][2]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
+    assert b[0] == mid
+    assert b[1] == mid
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[2]
+
+    # add block filter to the mix
+    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == mid
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == oldest
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[2]
diff --git a/apps/cic-cache/tests/testdata/config/test.ini b/apps/cic-cache/tests/testdata/config/test.ini
new file mode 100644
index 00000000..70903618
--- /dev/null
+++ b/apps/cic-cache/tests/testdata/config/test.ini
@@ -0,0 +1,2 @@
+[foo]
+bar_baz = xyzzy
diff --git a/apps/cic-eth/cic_eth/api/api_task.py b/apps/cic-eth/cic_eth/api/api_task.py
index 64448bfb..79501d30 100644
--- a/apps/cic-eth/cic_eth/api/api_task.py
+++ b/apps/cic-eth/cic_eth/api/api_task.py
@@ -520,9 +520,9 @@ class Api(ApiBase):
             s_external_get = celery.signature(
                 external_task,
                 [
-                    address,
                     offset,
                     limit,
+                    address,
                     ],
                 queue=external_queue,
                 )
diff --git a/apps/cic-eth/cic_eth/cli/rpc.py b/apps/cic-eth/cic_eth/cli/rpc.py
index cc073767..25e28f9b 100644
--- a/apps/cic-eth/cic_eth/cli/rpc.py
+++ b/apps/cic-eth/cic_eth/cli/rpc.py
@@ -24,20 +24,24 @@ class RPC:
 
 
     def get_default(self):
-        return RPCConnection.connect(self.chain_spec, 'default')
+        return self.get_by_label('default')
+
+
+    def get_by_label(self, label):
+        return RPCConnection.connect(self.chain_spec, label)
 
 
     @staticmethod
-    def from_config(config, use_signer=False):
+    def from_config(config, use_signer=False, default_label='default', signer_label='signer'):
         chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
 
-        RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, 'default')
+        RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, default_label)
         if use_signer:
 
-            RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, 'signer')
-            RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, 'signer')
-            RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, 'signer')
-            RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, 'signer')
+            RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, signer_label)
+            RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, signer_label)
+            RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, signer_label)
+            RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, signer_label) 
         rpc = RPC(chain_spec, config.get('RPC_HTTP_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
         logg.info('set up rpc: {}'.format(rpc))
         return rpc
diff --git a/apps/cic-eth/cic_eth/data/config/dispatcher.ini b/apps/cic-eth/cic_eth/data/config/dispatcher.ini
new file mode 100644
index 00000000..40e1b86e
--- /dev/null
+++ b/apps/cic-eth/cic_eth/data/config/dispatcher.ini
@@ -0,0 +1,2 @@
+[dispatcher]
+loop_interval = 1
diff --git a/apps/cic-eth/cic_eth/data/config/eth.ini b/apps/cic-eth/cic_eth/data/config/eth.ini
index 3bba24bf..fd58e918 100644
--- a/apps/cic-eth/cic_eth/data/config/eth.ini
+++ b/apps/cic-eth/cic_eth/data/config/eth.ini
@@ -1,2 +1,2 @@
 [eth]
-gas_gifter_minimum_balance = 10000000000000000000
+gas_gifter_minimum_balance = 10000000000000000000000
diff --git a/apps/cic-eth/cic_eth/data/config/retry.ini b/apps/cic-eth/cic_eth/data/config/retry.ini
new file mode 100644
index 00000000..4bec6b6b
--- /dev/null
+++ b/apps/cic-eth/cic_eth/data/config/retry.ini
@@ -0,0 +1,3 @@
+[retry]
+delay =
+batch_size =
diff --git a/apps/cic-eth/cic_eth/ext/tx.py b/apps/cic-eth/cic_eth/ext/tx.py
index 77083874..3adefa2e 100644
--- a/apps/cic-eth/cic_eth/ext/tx.py
+++ b/apps/cic-eth/cic_eth/ext/tx.py
@@ -12,9 +12,11 @@ from chainlib.eth.tx import (
         transaction_by_block,
         receipt,
         )
+from chainlib.eth.error import RequestMismatchException
 from chainlib.eth.block import block_by_number
 from chainlib.eth.contract import abi_decode_single
 from chainlib.eth.constant import ZERO_ADDRESS
+from chainlib.eth.tx import Tx
 from hexathon import strip_0x
 from cic_eth_registry import CICRegistry
 from cic_eth_registry.erc20 import ERC20Token
@@ -23,6 +25,8 @@ from chainqueue.db.models.otx import Otx
 from chainqueue.db.enum import StatusEnum
 from chainqueue.sql.query import get_tx_cache
 from eth_erc20 import ERC20
+from erc20_faucet import Faucet
+from potaahto.symbols import snake_and_camel
 
 # local imports
 from cic_eth.queue.time import tx_times
@@ -35,6 +39,32 @@ logg = logging.getLogger()
 MAX_BLOCK_TX = 250
 
 
+def parse_transaction(chain_spec, rpc, tx, sender_address=None):
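+    """Decode tx input data, first as an ERC20 transfer, then as a faucet give_to
+    request, resolving the token amount over rpc in the latter case.
+
+    :returns: Tuple of (recipient address, token value), or None if neither matches
+    """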
+    try:
+        transfer_data = ERC20.parse_transfer_request(tx['input'])
+        tx_address = transfer_data[0]
+        tx_token_value = transfer_data[1]
+        logg.debug('matched transfer transaction {} in block {} sender {} recipient {} value {}'.format(tx['hash'], tx['block_number'], tx['from'], tx_address, tx_token_value))
+        return (tx_address, tx_token_value)
+    except RequestMismatchException:
+        pass
+
+    try:
+        transfer_data = Faucet.parse_give_to_request(tx['input'])
+        tx_address = transfer_data[0]
+        c = Faucet(chain_spec)
+        o = c.token_amount(tx['to'], sender_address=sender_address, height=tx['block_number'])
+        r = rpc.do(o)
+        tx_token_value = Faucet.parse_token_amount(r)
+        logg.debug('matched giveto transaction {} in block {} sender {} recipient {} value {}'.format(tx['hash'], tx['block_number'], tx['from'], tx_address, tx_token_value))
+        return (tx_address, tx_token_value)
+
+    except RequestMismatchException:
+        pass
+
+    return None
+
+
 # TODO: Make this method easier to read
 @celery_app.task(bind=True, base=BaseTask)
 def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
@@ -71,36 +101,39 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
     tx_filter = moolb.Bloom(databitlen, bloomspec['filter_rounds'], default_data=tx_filter_data)
 
     txs = {}
+    logg.debug('processing filter with span low {} to high {}'.format(bloomspec['low'], bloomspec['high']))
     for block_height in range(bloomspec['low'], bloomspec['high']):
         block_height_bytes = block_height.to_bytes(4, 'big')
         if block_filter.check(block_height_bytes):
             logg.debug('filter matched block {}'.format(block_height))
             o = block_by_number(block_height)
             block = rpc.do(o)
-            logg.debug('block {}'.format(block))
 
             for tx_index in range(0, len(block['transactions'])):
-                composite = tx_index + block_height
-                tx_index_bytes = composite.to_bytes(4, 'big')
-                if tx_filter.check(tx_index_bytes):
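+                # the blocktx filter is keyed on the serialized block number
+                # concatenated with the serialized tx index, matching cic-cache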
+                tx_index_bytes = tx_index.to_bytes(4, 'big')
+                composite = block_height_bytes + tx_index_bytes
+                if tx_filter.check(composite):
                     logg.debug('filter matched block {} tx {}'.format(block_height, tx_index))
 
+                    o = transaction_by_block(block['hash'], tx_index)
                     try:
-                        #tx = c.w3.eth.getTransactionByBlock(block_height, tx_index)
-                        o = transaction_by_block(block['hash'], tx_index)
                         tx = rpc.do(o)
                     except Exception as e:
                         logg.debug('false positive on block {} tx {} ({})'.format(block_height, tx_index, e))
                         continue
+
+                    tx = Tx(tx).src()
+
+                    logg.debug('got tx {}'.format(tx))
                     tx_address = None
                     tx_token_value = 0
-                    try:
-                        transfer_data = ERC20.parse_transfer_request(tx['data'])
-                        tx_address = transfer_data[0]
-                        tx_token_value = transfer_data[1]
-                    except ValueError:
-                        logg.debug('not a transfer transaction, skipping {}'.format(tx))
+
+                    transfer_data = parse_transaction(chain_spec, rpc, tx, sender_address=BaseTask.call_address)
+                    if transfer_data == None:
                         continue
+                    tx_address = transfer_data[0]
+                    tx_token_value = transfer_data[1]
+                    
                     if address == tx_address:
                         status = StatusEnum.SENT
                         try:
@@ -136,6 +169,7 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
     return txs
 
 
+
 # TODO: Surely it must be possible to optimize this
 # TODO: DRY this with callback filter in cic_eth/runnable/manager
 # TODO: Remove redundant fields from end representation (timestamp, tx_hash)
diff --git a/apps/cic-eth/config/database.ini b/apps/cic-eth/config/database.ini
deleted file mode 100644
index 6448be0e..00000000
--- a/apps/cic-eth/config/database.ini
+++ /dev/null
@@ -1,10 +0,0 @@
-[database]
-NAME=cic_eth
-USER=postgres
-PASSWORD=tralala
-HOST=localhost
-PORT=63432
-ENGINE=postgresql
-DRIVER=psycopg2
-POOL_SIZE=50
-DEBUG=0
diff --git a/apps/cic-eth/config/docker/chain.ini b/apps/cic-eth/config/docker/chain.ini
new file mode 100644
index 00000000..3a23481e
--- /dev/null
+++ b/apps/cic-eth/config/docker/chain.ini
@@ -0,0 +1,2 @@
+[chain]
+spec = evm:bloxberg:8996
diff --git a/apps/cic-eth/config/docker/database.ini b/apps/cic-eth/config/docker/database.ini
deleted file mode 100644
index 6448be0e..00000000
--- a/apps/cic-eth/config/docker/database.ini
+++ /dev/null
@@ -1,10 +0,0 @@
-[database]
-NAME=cic_eth
-USER=postgres
-PASSWORD=tralala
-HOST=localhost
-PORT=63432
-ENGINE=postgresql
-DRIVER=psycopg2
-POOL_SIZE=50
-DEBUG=0
diff --git a/apps/cic-eth/requirements.txt b/apps/cic-eth/requirements.txt
index 196a1284..968deb53 100644
--- a/apps/cic-eth/requirements.txt
+++ b/apps/cic-eth/requirements.txt
@@ -1,3 +1,3 @@
 celery==4.4.7
-chainlib-eth>=0.0.7a1,<0.1.0
+chainlib-eth>=0.0.7a5,<0.1.0
 semver==2.13.0
diff --git a/apps/cic-eth/services_requirements.txt b/apps/cic-eth/services_requirements.txt
index 5308d2f4..80d8009c 100644
--- a/apps/cic-eth/services_requirements.txt
+++ b/apps/cic-eth/services_requirements.txt
@@ -9,7 +9,7 @@ liveness~=0.0.1a7
 eth-address-index>=0.1.4a1,<0.2.0
 eth-accounts-index>=0.0.14a1,<0.1.0
 cic-eth-registry>=0.5.8a1,<0.6.0
-erc20-faucet>=0.2.4a1,<0.3.0
+erc20-faucet>=0.2.4a2,<0.3.0
 erc20-transfer-authorization>=0.3.4a1,<0.4.0
 sarafu-faucet>=0.0.5a2,<0.1.0
 moolb~=0.1.1b2
diff --git a/apps/cic-eth/tests/unit/ext/test_ext_tx.py b/apps/cic-eth/tests/task/test_ext_tx.py
similarity index 84%
rename from apps/cic-eth/tests/unit/ext/test_ext_tx.py
rename to apps/cic-eth/tests/task/test_ext_tx.py
index 90241865..2740bf20 100644
--- a/apps/cic-eth/tests/unit/ext/test_ext_tx.py
+++ b/apps/cic-eth/tests/task/test_ext_tx.py
@@ -20,7 +20,6 @@ from cic_eth.db.models.nonce import (
 logg = logging.getLogger()
 
 
-# TODO: This test fails when not run alone. Identify which fixture leaves a dirty state
 def test_filter_process(
         init_database,
         default_chain_spec,
@@ -48,10 +47,10 @@ def test_filter_process(
     eth_rpc.do(o)
     o = receipt(tx_hash_hex)
     r = eth_rpc.do(o)
-    a = r['block_number']
-    b.add(a.to_bytes(4, 'big'))
-    a = r['block_number'] + r['transaction_index']
-    t.add(a.to_bytes(4, 'big'))
+    block_bytes = r['block_number'].to_bytes(4, 'big')
+    b.add(block_bytes)
+    tx_index_bytes = r['transaction_index'].to_bytes(4, 'big')
+    t.add(block_bytes + tx_index_bytes)
     tx_hashes.append(tx_hash_hex)
 
     # external tx
@@ -61,10 +60,10 @@ def test_filter_process(
     eth_rpc.do(o)
     o = receipt(tx_hash_hex)
     r = eth_rpc.do(o)
-    a = r['block_number']
-    b.add(a.to_bytes(4, 'big'))
-    a = r['block_number'] + r['transaction_index']
-    t.add(a.to_bytes(4, 'big'))
+    block_bytes = r['block_number'].to_bytes(4, 'big')
+    b.add(block_bytes)
+    tx_index_bytes = r['transaction_index'].to_bytes(4, 'big')
+    t.add(block_bytes + tx_index_bytes)
     tx_hashes.append(tx_hash_hex)
 
     init_eth_tester.mine_blocks(10)
diff --git a/apps/cic-eth/tests/testdata/config/test.ini b/apps/cic-eth/tests/testdata/config/test.ini
new file mode 100644
index 00000000..70903618
--- /dev/null
+++ b/apps/cic-eth/tests/testdata/config/test.ini
@@ -0,0 +1,2 @@
+[foo]
+bar_baz = xyzzy
diff --git a/apps/cic-eth/tests/unit/cli/test_cli_args.py b/apps/cic-eth/tests/unit/cli/test_cli_args.py
new file mode 100644
index 00000000..bda5f183
--- /dev/null
+++ b/apps/cic-eth/tests/unit/cli/test_cli_args.py
@@ -0,0 +1,56 @@
+# standard imports
+import os
+import logging
+
+# external imports
+import chainlib.cli 
+
+# local imports
+import cic_eth.cli
+
+logg = logging.getLogger()
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+#config_dir = os.path.join(script_dir, '..', '..', 'testdata', 'config')
+
+
+def test_argumentparser_to_config():
+
+    argparser = cic_eth.cli.ArgumentParser()
+    
+    local_flags = 0xffff
+    argparser.process_local_flags(local_flags) 
+    argparser.add_argument('--foo', type=str)
+    args = argparser.parse_args([
+        '--redis-host', 'foo',
+        '--redis-port', '123',
+        '--redis-db', '0',
+        '--redis-host-callback', 'bar',
+        '--redis-port-callback', '456',
+        '--redis-timeout', '10.0',
+        '-q', 'baz',
+        '--offset', '13',
+        '--no-history',
+        '-r','0xdeadbeef',
+        '-vv',
+        '--foo', 'bar',
+        ])
+
+    extra_args = {
+            'foo': '_BARBARBAR',
+            }
+    #config = cic_eth.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args, base_config_dir=config_dir)
+    config = cic_eth.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args)
+
+    assert config.get('_BARBARBAR') == 'bar'
+    assert config.get('REDIS_HOST') == 'foo'
+    assert config.get('REDIS_PORT') == 123
+    assert config.get('REDIS_DB') == 0
+    assert config.get('_REDIS_HOST_CALLBACK') == 'bar'
+    assert config.get('_REDIS_PORT_CALLBACK') == 456
+    assert config.get('REDIS_TIMEOUT') == 10.0
+    assert config.get('CELERY_QUEUE') == 'baz'
+    assert config.get('SYNCER_NO_HISTORY') == True
+    assert config.get('SYNCER_OFFSET') == 13
+    assert config.get('CIC_REGISTRY_ADDRESS') == '0xdeadbeef'
+
diff --git a/apps/cic-eth/tests/unit/cli/test_cli_celery.py b/apps/cic-eth/tests/unit/cli/test_cli_celery.py
new file mode 100644
index 00000000..1727008b
--- /dev/null
+++ b/apps/cic-eth/tests/unit/cli/test_cli_celery.py
@@ -0,0 +1,17 @@
+# standard imports
+import tempfile
+
+# local imports
+import cic_eth.cli
+
+
+def test_cli_celery():
+    cf = tempfile.mkdtemp()
+
+    config = {
+            'CELERY_RESULT_URL': 'filesystem://' + cf,
+            }
+    cic_eth.cli.CeleryApp.from_config(config)
+
+    config['CELERY_BROKER_URL'] = 'filesystem://' + cf
+    cic_eth.cli.CeleryApp.from_config(config)
diff --git a/apps/cic-eth/tests/unit/cli/test_cli_chain.py b/apps/cic-eth/tests/unit/cli/test_cli_chain.py
new file mode 100644
index 00000000..8ebebc15
--- /dev/null
+++ b/apps/cic-eth/tests/unit/cli/test_cli_chain.py
@@ -0,0 +1,64 @@
+# external imports
+import pytest
+from chainlib.eth.gas import (
+        Gas,
+        RPCGasOracle,
+        )
+from chainlib.eth.nonce import RPCNonceOracle
+from chainlib.eth.block import (
+        block_latest,
+        Block,
+        )
+
+# local imports
+import cic_eth.cli
+
+
+@pytest.mark.xfail()
+def test_cli_rpc(
+        eth_rpc,
+        eth_signer,
+        default_chain_spec,
+        ):
+    config = {
+        'CHAIN_SPEC': str(default_chain_spec),
+        'RPC_HTTP_PROVIDER': 'http://localhost:8545',
+            }
+    rpc = cic_eth.cli.RPC.from_config(config, default_label='foo') 
+    conn = rpc.get_by_label('foo')
+    #o = block_latest()
+    #conn.do(o)
+
+
+def test_cli_chain(
+        default_chain_spec,
+        eth_rpc,
+        eth_signer,
+        contract_roles,
+        agent_roles,
+        ):
+    ifc = cic_eth.cli.EthChainInterface()
+
+    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
+    gas_oracle = RPCGasOracle(conn=eth_rpc)
+    c = Gas(default_chain_spec, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, signer=eth_signer)
+    (tx_hash, o) = c.create(contract_roles['CONTRACT_DEPLOYER'], agent_roles['ALICE'], 1024)
+    r = eth_rpc.do(o)
+
+    o = ifc.tx_receipt(r)
+    r = eth_rpc.do(o)
+    assert r['status'] == 1
+
+    o = ifc.block_by_number(1)
+    block_src = eth_rpc.do(o)
+    block = ifc.block_from_src(block_src)
+    assert block.number == 1
+
+    with pytest.raises(KeyError):
+        assert block_src['gasUsed'] == 21000
+        assert block_src['gas_used'] == 21000
+
+    block_src = ifc.src_normalize(block_src)
+    assert block_src['gasUsed'] == 21000
+    assert block_src['gas_used'] == 21000
+
diff --git a/docker-compose.yml b/docker-compose.yml
index 57b6be2d..147ea44f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -238,7 +238,7 @@ services:
       - |
         if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi 
         "/usr/local/bin/uwsgi" \
-        --wsgi-file /usr/src/cic-cache/cic_cache/runnable/daemons/server.py \
+        --wsgi-file /root/cic_cache/runnable/daemons/server.py \
         --http :8000 \
         --pyargv "-vv"