diff options
author | Bryan Newbold <bnewbold@robocracy.org> | 2019-12-12 10:15:05 -0800 |
---|---|---|
committer | Bryan Newbold <bnewbold@robocracy.org> | 2019-12-12 17:50:39 -0800 |
commit | 9be73bd3b5323cb5a1ac3b63c392d343c18a5a8c (patch) | |
tree | 7b11e326c9be7667b3df676ba4295a1017a6df66 /python/fatcat_import.py | |
parent | 12d1df8d0e0c55bd71f5348d10836c135a0d6210 (diff) | |
download | fatcat-9be73bd3b5323cb5a1ac3b63c392d343c18a5a8c.tar.gz fatcat-9be73bd3b5323cb5a1ac3b63c392d343c18a5a8c.zip |
savepapernow result importer
Based on ingest-file-results importer
Diffstat (limited to 'python/fatcat_import.py')
-rwxr-xr-x | python/fatcat_import.py | 24 |
1 file changed, 24 insertions, 0 deletions
diff --git a/python/fatcat_import.py b/python/fatcat_import.py index 04f58ff7..8d82dab3 100755 --- a/python/fatcat_import.py +++ b/python/fatcat_import.py @@ -105,6 +105,17 @@ def run_ingest_file(args): else: JsonLinePusher(ifri, args.json_file).run() +def run_savepapernow_file(args): + ifri = SavePaperNowFileImporter(args.api, + editgroup_description=args.editgroup_description_override, + edit_batch_size=args.batch_size) + if args.kafka_mode: + KafkaJsonPusher(ifri, args.kafka_hosts, args.kafka_env, "ingest-file-results", + "savepapernow-file-result", kafka_namespace="sandcrawler", + consume_batch_size=args.batch_size).run() + else: + JsonLinePusher(ifri, args.json_file).run() + def run_grobid_metadata(args): fmi = GrobidMetadataImporter(args.api, edit_batch_size=args.batch_size, @@ -361,6 +372,19 @@ def main(): default="web", help="default URL rel for matches (eg, 'publisher', 'web')") + sub_savepapernow_file = subparsers.add_parser('savepapernow-file-results', + help="add file entities crawled due to async Save Paper Now request") + sub_savepapernow_file.set_defaults( + func=run_savepapernow_file, + auth_var="FATCAT_AUTH_WORKER_SAVEPAPERNOW", + ) + sub_savepapernow_file.add_argument('json_file', + help="ingest-file JSON file to import from", + default=sys.stdin, type=argparse.FileType('r')) + sub_savepapernow_file.add_argument('--kafka-mode', + action='store_true', + help="consume from kafka topic (not stdin)") + sub_grobid_metadata = subparsers.add_parser('grobid-metadata', help="create release and file entities based on GROBID PDF metadata extraction") sub_grobid_metadata.set_defaults( |