| author | Bryan Newbold <bnewbold@robocracy.org> | 2018-11-13 23:48:45 -0800 |
|---|---|---|
| committer | Bryan Newbold <bnewbold@robocracy.org> | 2018-11-13 23:48:47 -0800 |
| commit | 7634f6ecf2361b1cb1cafd4e27fd1fb84d81d130 (patch) | |
| tree | 69b18860ed4188c5169e9d9cb174355966b6f7de | /python/fatcat_tools/workers/changelog.py |
| parent | 7edae5c9d2267ba5e381ecbf00a7c3f7dacf4194 (diff) | |
| download | fatcat-7634f6ecf2361b1cb1cafd4e27fd1fb84d81d130.tar.gz | fatcat-7634f6ecf2361b1cb1cafd4e27fd1fb84d81d130.zip |
switch to auto consumer offset updates
This is the classic/correct way to do consumer group updates for higher
throughput, when "at least once" semantics are acceptable (as they are
here; double processing should be safe/fine).
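For reference, here is the shape of that pattern in isolation: a pykafka balanced consumer with background offset auto-commit in place of a manual commit after every message. This is a minimal sketch, not fatcat's actual worker; the broker address, topic name, group name, and handle() function are all placeholders.

```python
from pykafka import KafkaClient
from pykafka.common import OffsetType


def handle(payload: bytes) -> None:
    # Hypothetical stand-in for real processing. Under at-least-once
    # delivery it must be safe to run twice on the same message.
    print("processing {} bytes".format(len(payload)))


client = KafkaClient(hosts="127.0.0.1:9092")  # placeholder broker
topic = client.topics[b"example.changelog"]   # placeholder topic

consumer = topic.get_balanced_consumer(
    consumer_group=b"example-worker",   # placeholder group
    managed=True,                       # broker-managed group coordination
    auto_offset_reset=OffsetType.LATEST,
    reset_offset_on_start=False,
    auto_commit_enable=True,            # commit offsets in the background...
    auto_commit_interval_ms=30000,      # ...every 30 seconds
)

for msg in consumer:
    # Anything consumed after the last background commit is redelivered
    # if the worker crashes, hence the "at least once" caveat above.
    handle(msg.value)
```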
Diffstat (limited to 'python/fatcat_tools/workers/changelog.py')

-rw-r--r-- python/fatcat_tools/workers/changelog.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
```diff
diff --git a/python/fatcat_tools/workers/changelog.py b/python/fatcat_tools/workers/changelog.py
index 92bb8bdd..6319da2f 100644
--- a/python/fatcat_tools/workers/changelog.py
+++ b/python/fatcat_tools/workers/changelog.py
@@ -102,12 +102,17 @@ class FatcatEntityUpdatesWorker(FatcatWorker):
             managed=True,
             auto_offset_reset=OffsetType.LATEST,
             reset_offset_on_start=False,
+            fetch_message_max_bytes=4000000, # up to ~4MBytes
+            auto_commit_enable=True,
+            auto_commit_interval_ms=30000, # 30 seconds
+            compacted_topic=True,
         )
 
         with release_topic.get_sync_producer() as producer:
             for msg in consumer:
                 cle = json.loads(msg.value.decode('utf-8'))
                 #print(cle)
+                print("processing changelog index {}".format(cle['index']))
                 release_edits = cle['editgroup']['edits']['releases']
                 for re in release_edits:
                     ident = re['ident']
@@ -118,5 +123,5 @@ class FatcatEntityUpdatesWorker(FatcatWorker):
                         partition_key=ident.encode('utf-8'),
                         timestamp=None,
                     )
-                consumer.commit_offsets()
+                #consumer.commit_offsets()
 
```
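A note on what the diff amounts to, based only on the lines above: the per-message consumer.commit_offsets() call (a broker round-trip after every changelog entry) is commented out in favor of background commits every 30 seconds via auto_commit_enable=True and auto_commit_interval_ms=30000. A crash can therefore replay up to roughly 30 seconds of already-processed messages on restart, which is the "at least once" trade-off the commit message accepts. The other two parameters are incidental to offset handling: fetch_message_max_bytes=4000000 raises the per-fetch cap to about 4 MB (presumably to accommodate large changelog entries), and compacted_topic=True tells pykafka to expect the non-contiguous offsets of a log-compacted topic.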