diff options
author | Bryan Newbold <bnewbold@robocracy.org> | 2021-10-01 17:39:40 -0700 |
---|---|---|
committer | Bryan Newbold <bnewbold@robocracy.org> | 2021-10-01 17:39:43 -0700 |
commit | b72c18e3518e827bd09044deaadcbf0b0ca50335 (patch) | |
tree | 9c79a4907971ea3f940a4069d5c3c48d4acb6361 /python | |
parent | 9618d5146eea046342b69895e68b937a056d2816 (diff) | |
download | fatcat-b72c18e3518e827bd09044deaadcbf0b0ca50335.tar.gz fatcat-b72c18e3518e827bd09044deaadcbf0b0ca50335.zip |
kafka import: optional 'force-flush' mode for some importers
Behavior and motivation described in the kafka json import comment.
Diffstat (limited to 'python')
-rwxr-xr-x | python/fatcat_import.py | 3 |
-rw-r--r-- | python/fatcat_tools/importers/common.py | 13 |
2 files changed, 16 insertions, 0 deletions
diff --git a/python/fatcat_import.py b/python/fatcat_import.py index 7e790fa4..b82e81c7 100755 --- a/python/fatcat_import.py +++ b/python/fatcat_import.py @@ -159,6 +159,7 @@ def run_ingest_web(args): "fatcat-{}-ingest-web-result".format(args.kafka_env), kafka_namespace="sandcrawler", consume_batch_size=args.batch_size, + force_flush=True, ).run() else: JsonLinePusher(iwri, args.json_file).run() @@ -176,6 +177,7 @@ def run_savepapernow_file(args): "fatcat-{}-savepapernow-file-result".format(args.kafka_env), kafka_namespace="sandcrawler", consume_batch_size=args.batch_size, + force_flush=True, ).run() else: JsonLinePusher(ifri, args.json_file).run() @@ -193,6 +195,7 @@ def run_savepapernow_web(args): "fatcat-{}-savepapernow-web-result".format(args.kafka_env), kafka_namespace="sandcrawler", consume_batch_size=args.batch_size, + force_flush=True, ).run() else: JsonLinePusher(ifri, args.json_file).run() diff --git a/python/fatcat_tools/importers/common.py b/python/fatcat_tools/importers/common.py index 6ca9a50c..e936477c 100644 --- a/python/fatcat_tools/importers/common.py +++ b/python/fatcat_tools/importers/common.py @@ -779,10 +779,12 @@ class KafkaJsonPusher(RecordPusher): ) self.poll_interval = kwargs.get('poll_interval', 5.0) self.consume_batch_size = kwargs.get('consume_batch_size', 100) + self.force_flush = kwargs.get('force_flush', False) def run(self): count = 0 last_push = datetime.datetime.now() + last_force_flush = datetime.datetime.now() while True: # Note: this is batch-oriented, because underlying importer is # often batch-oriented, but this doesn't confirm that entire batch @@ -798,11 +800,22 @@ class KafkaJsonPusher(RecordPusher): timeout=self.poll_interval) print("... got {} kafka messages ({}sec poll interval) {}".format( len(batch), self.poll_interval, self.importer.counts)) + if self.force_flush: + # this flushing happens even if there have been 'push' events
+ # more recently. it is intended for, eg, importers off the + # ingest file stream *other than* the usual file importer. the + # web/HTML and savepapernow importers get frequent messages, + # but rare 'want() == True', so need an extra flush + if datetime.datetime.now() - last_force_flush > datetime.timedelta(minutes=5): + self.importer.finish() + last_push = datetime.datetime.now() + last_force_flush = datetime.datetime.now() if not batch: if datetime.datetime.now() - last_push > datetime.timedelta(minutes=5): # it has been some time, so flush any current editgroup self.importer.finish() last_push = datetime.datetime.now() + last_force_flush = datetime.datetime.now() #print("Flushed any partial import batch: {}".format(self.importer.counts)) continue # first check errors on entire batch...