about summary refs log tree commit diff stats
path: root/python/fatcat_import.py
diff options
context:
space:
mode:
authorBryan Newbold <bnewbold@robocracy.org>2021-10-01 17:33:42 -0700
committerBryan Newbold <bnewbold@robocracy.org>2021-10-01 17:33:42 -0700
commit9618d5146eea046342b69895e68b937a056d2816 (patch)
treee5ff7d221e45206dcc213c9dfc98518c502cc28b /python/fatcat_import.py
parent6e0736cebcb2b1e5ddbae03127572ad9d1ffca49 (diff)
downloadfatcat-9618d5146eea046342b69895e68b937a056d2816.tar.gz
fatcat-9618d5146eea046342b69895e68b937a056d2816.zip
new SPN web (html) importer
Diffstat (limited to 'python/fatcat_import.py')
-rwxr-xr-xpython/fatcat_import.py30
1 file changed, 30 insertions, 0 deletions
diff --git a/python/fatcat_import.py b/python/fatcat_import.py
index 1dcfec21..7e790fa4 100755
--- a/python/fatcat_import.py
+++ b/python/fatcat_import.py
@@ -180,6 +180,23 @@ def run_savepapernow_file(args):
else:
JsonLinePusher(ifri, args.json_file).run()
def run_savepapernow_web(args):
    """Import Save Paper Now web-ingest results as webcapture entities.

    By default reads JSON lines from ``args.json_file``; when
    ``--kafka-mode`` is set, consumes ingest results from the
    "ingest-file-results" Kafka topic instead.
    """
    importer = SavePaperNowWebImporter(
        args.api,
        editgroup_description=args.editgroup_description_override,
        edit_batch_size=args.batch_size,
    )
    # Plain file/stdin mode: one JSON document per line.
    if not args.kafka_mode:
        JsonLinePusher(importer, args.json_file).run()
        return
    # Kafka mode: consume from the sandcrawler ingest-results topic with a
    # consumer group dedicated to this worker.
    pusher = KafkaJsonPusher(
        importer,
        args.kafka_hosts,
        args.kafka_env,
        "ingest-file-results",
        "fatcat-{}-savepapernow-web-result".format(args.kafka_env),
        kafka_namespace="sandcrawler",
        consume_batch_size=args.batch_size,
    )
    pusher.run()
def run_grobid_metadata(args):
fmi = GrobidMetadataImporter(args.api,
edit_batch_size=args.batch_size,
@@ -554,6 +571,19 @@ def main():
action='store_true',
help="consume from kafka topic (not stdin)")
+ sub_savepapernow_web = subparsers.add_parser('savepapernow-web-results',
+ help="add webcapture entities crawled due to async Save Paper Now request")
+ sub_savepapernow_web.set_defaults(
+ func=run_savepapernow_web,
+ auth_var="FATCAT_AUTH_WORKER_SAVEPAPERNOW",
+ )
+ sub_savepapernow_web.add_argument('json_file',
+ help="ingest-file JSON file to import from",
+ default=sys.stdin, type=argparse.FileType('r'))
+ sub_savepapernow_web.add_argument('--kafka-mode',
+ action='store_true',
+ help="consume from kafka topic (not stdin)")
+
sub_grobid_metadata = subparsers.add_parser('grobid-metadata',
help="create release and file entities based on GROBID PDF metadata extraction")
sub_grobid_metadata.set_defaults(