cic-chain-events/internal/pipeline/pipeline.go

package pipeline

import (
	"context"

	"github.com/grassrootseconomics/cic-chain-events/internal/fetch"
	"github.com/grassrootseconomics/cic-chain-events/internal/filter"
	"github.com/grassrootseconomics/cic-chain-events/internal/store"
	"github.com/jackc/pgx/v5"
	"github.com/zerodha/logf"
)
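
// PipelineOpts bundles the dependencies required to construct a Pipeline.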
type PipelineOpts struct {
	BlockFetcher fetch.Fetch
	Filters      []filter.Filter
	Logg         logf.Logger
	Store        store.Store[pgx.Rows]
}
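
// Pipeline runs fetched blocks through the configured filters and commits them to the store.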
type Pipeline struct {
	fetch   fetch.Fetch
	filters []filter.Filter
	logg    logf.Logger
	store   store.Store[pgx.Rows]
}
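
// NewPipeline constructs a Pipeline from the provided options. A rough
// construction sketch (illustrative only; blockFetcher, decodeFilter,
// sinkFilter, lo, and pgStore are hypothetical values assumed to be created
// from the corresponding internal packages):
//
//	p := NewPipeline(PipelineOpts{
//		BlockFetcher: blockFetcher,
//		Filters:      []filter.Filter{decodeFilter, sinkFilter},
//		Logg:         lo,
//		Store:        pgStore,
//	})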
func NewPipeline(o PipelineOpts) *Pipeline {
	return &Pipeline{
		fetch:   o.BlockFetcher,
		filters: o.Filters,
		logg:    o.Logg,
		store:   o.Store,
	}
}

// Run is the task executor that runs in its own goroutine and does the following:
// 1. Fetches the block and all transactional data
// 2. Passes the block through all filters
// 3. Commits the block to the store as successfully processed
//
// Note:
// - Blocks are processed atomically; a failure midway causes the block to be reprocessed from the start
// - Therefore, any side effect/event sink in a filter should support deduplication
func (md *Pipeline) Run(ctx context.Context, blockNumber uint64) error {
	md.logg.Debug("pipeline: processing block", "block", blockNumber)

	fetchResp, err := md.fetch.Block(ctx, blockNumber)
	if err != nil {
		return err
	}

	for _, tx := range fetchResp.Data.Block.Transactions {
		for _, filter := range md.filters {
			next, err := filter.Execute(ctx, tx)
			if err != nil {
				return err
			}

			if !next {
				break
			}
		}
	}

	if err := md.store.CommitBlock(ctx, blockNumber); err != nil {
		return err
	}

	md.logg.Debug("pipeline: committed block", "block", blockNumber)

	return nil
}