path: root/python/fatcat_import.py
author     Bryan Newbold <bnewbold@robocracy.org>  2018-09-14 15:02:52 -0700
committer  Bryan Newbold <bnewbold@robocracy.org>  2018-09-14 15:06:47 -0700
commit     61caceebcc5cd04b28d9859b27ac314bb2a59bbb (patch)
tree       ee70241ade0fb769e33b0312873826d243740282 /python/fatcat_import.py
parent     ac0b49ee3e04d98ad5b6dd8c2360a71d7ecce1a3 (diff)
add insert counting to importers
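
The describe_run() calls added below report the insert counts mentioned in the commit message. The counting itself happens in the shared importer base class, which is not part of this diff; the following is only a rough sketch of how such a counter could be structured (class, attribute, and method names here are assumptions, not taken from the fatcat source):

    # Hypothetical sketch of an insert-counting importer; only the
    # describe_run() entry point appears in the diff below.
    class CountingImporter:

        def __init__(self):
            self.processed_count = 0  # records read from the input
            self.insert_count = 0     # entities actually created

        def process_batch(self, json_file, size=50):
            # `size` mirrors the batch size passed from --batch-size;
            # batching and API calls are omitted, this sketch only shows
            # where the counters would be incremented.
            for line in json_file:
                self.processed_count += 1
                if self.insert_record(line):
                    self.insert_count += 1

        def insert_record(self, line):
            # Subclasses (Crossref, ORCID, ISSN, manifest, matched) would
            # create the entity here and return True if it was inserted.
            raise NotImplementedError

        def describe_run(self):
            # Printed once at the end of an import run.
            print("Processed {} records, inserted {}".format(
                self.processed_count, self.insert_count))
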
Diffstat (limited to 'python/fatcat_import.py')
-rwxr-xr-x  python/fatcat_import.py  22
1 file changed, 22 insertions(+), 0 deletions(-)
diff --git a/python/fatcat_import.py b/python/fatcat_import.py
index 2f0c746f..525cf286 100755
--- a/python/fatcat_import.py
+++ b/python/fatcat_import.py
@@ -12,18 +12,28 @@ def run_import_crossref(args):
     fci = FatcatCrossrefImporter(args.host_url, args.issn_map_file,
         args.extid_map_file, create_containers=(not args.no_create_containers))
     fci.process_batch(args.json_file, size=args.batch_size)
+    fci.describe_run()
 
 def run_import_orcid(args):
     foi = FatcatOrcidImporter(args.host_url)
     foi.process_batch(args.json_file, size=args.batch_size)
+    foi.describe_run()
 
 def run_import_issn(args):
     fii = FatcatIssnImporter(args.host_url)
     fii.process_csv_batch(args.csv_file, size=args.batch_size)
+    fii.describe_run()
 
 def run_import_manifest(args):
     fmi = FatcatManifestImporter(args.host_url)
     fmi.process_db(args.db_path, size=args.batch_size)
+    fmi.describe_run()
+
+def run_import_matched(args):
+    fmi = FatcatMatchedImporter(args.host_url,
+        skip_file_update=args.no_file_update)
+    fmi.process_batch(args.json_file, size=args.batch_size)
+    fmi.describe_run()
 
 def health(args):
     rfac = RawFatcatApiClient(args.host_url)
@@ -84,6 +94,18 @@ def main():
         help="size of batch to send",
         default=50, type=int)
 
+    sub_import_matched = subparsers.add_parser('import-matched')
+    sub_import_matched.set_defaults(func=run_import_matched)
+    sub_import_matched.add_argument('json_file',
+        help="JSON file to import from (or stdin)",
+        default=sys.stdin, type=argparse.FileType('r'))
+    sub_import_matched.add_argument('--no-file-update',
+        action='store_true',
+        help="don't lookup existing files, just insert (only for bootstrap)")
+    sub_import_matched.add_argument('--batch-size',
+        help="size of batch to send",
+        default=50, type=int)
+
     sub_health = subparsers.add_parser('health')
     sub_health.set_defaults(func=health)
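
With this change applied, a matched-file import would be invoked roughly like this (the input path is a placeholder; --batch-size just overrides the default of 50 defined above):

    ./fatcat_import.py import-matched /path/to/matched_files.json --batch-size 100

At the end of such a run, the new describe_run() call would then report the insert counts for that import.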