From bb28a3fc1cc900f2dde31e1dbc492d9661034f41 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Thu, 15 Nov 2018 13:11:52 -0800
Subject: large refactor of python names/paths

- Add __init__.py files for fatcat_tools submodules, and use them in imports
- Add a bunch of comments to files.
- rename a number of classes and functions to be less verbose
---
 python/fatcat_import.py | 76 ++++++++++++++++++++++++-------------------------
 1 file changed, 37 insertions(+), 39 deletions(-)

(limited to 'python/fatcat_import.py')

diff --git a/python/fatcat_import.py b/python/fatcat_import.py
index 0ec0cfa8..a5527b8c 100755
--- a/python/fatcat_import.py
+++ b/python/fatcat_import.py
@@ -2,36 +2,34 @@
 
 import sys
 import argparse
-from fatcat_tools.importers.crossref import FatcatCrossrefImporter
-from fatcat_tools.importers.orcid import FatcatOrcidImporter
-from fatcat_tools.importers.issn import FatcatIssnImporter
-from fatcat_tools.importers.matched import FatcatMatchedImporter
-from fatcat_tools.importers.grobid_metadata import FatcatGrobidMetadataImporter
+from fatcat_tools.importers import CrossrefImporter, OrcidImporter, \
+    IssnImporter, MatchedImporter, GrobidMetadataImporter
 
-def run_import_crossref(args):
-    fci = FatcatCrossrefImporter(args.host_url, args.issn_map_file,
+
+def run_crossref(args):
+    fci = CrossrefImporter(args.host_url, args.issn_map_file,
         args.extid_map_file,
         create_containers=(not args.no_create_containers))
     fci.process_batch(args.json_file, size=args.batch_size)
     fci.describe_run()
 
-def run_import_orcid(args):
-    foi = FatcatOrcidImporter(args.host_url)
+def run_orcid(args):
+    foi = OrcidImporter(args.host_url)
     foi.process_batch(args.json_file, size=args.batch_size)
     foi.describe_run()
 
-def run_import_issn(args):
-    fii = FatcatIssnImporter(args.host_url)
+def run_issn(args):
+    fii = IssnImporter(args.host_url)
     fii.process_csv_batch(args.csv_file, size=args.batch_size)
     fii.describe_run()
 
-def run_import_matched(args):
-    fmi = FatcatMatchedImporter(args.host_url,
+def run_matched(args):
+    fmi = MatchedImporter(args.host_url,
         skip_file_update=args.no_file_update)
     fmi.process_batch(args.json_file, size=args.batch_size)
     fmi.describe_run()
 
-def run_import_grobid_metadata(args):
-    fmi = FatcatGrobidMetadataImporter(args.host_url)
+def run_grobid_metadata(args):
+    fmi = GrobidMetadataImporter(args.host_url)
     fmi.process_source(args.tsv_file, group_size=args.group_size)
     fmi.describe_run()
@@ -45,60 +43,60 @@ def main():
         help="connect to this host/port")
     subparsers = parser.add_subparsers()
 
-    sub_import_crossref = subparsers.add_parser('import-crossref')
-    sub_import_crossref.set_defaults(func=run_import_crossref)
-    sub_import_crossref.add_argument('json_file',
+    sub_crossref = subparsers.add_parser('crossref')
+    sub_crossref.set_defaults(func=run_crossref)
+    sub_crossref.add_argument('json_file',
         help="crossref JSON file to import from",
         default=sys.stdin, type=argparse.FileType('r'))
-    sub_import_crossref.add_argument('issn_map_file',
+    sub_crossref.add_argument('issn_map_file',
         help="ISSN to ISSN-L mapping file",
         default=None, type=argparse.FileType('r'))
-    sub_import_crossref.add_argument('extid_map_file',
+    sub_crossref.add_argument('extid_map_file',
         help="DOI-to-other-identifiers sqlite3 database",
         default=None, type=str)
-    sub_import_crossref.add_argument('--no-create-containers',
+    sub_crossref.add_argument('--no-create-containers',
         action='store_true',
         help="skip creation of new container entities based on ISSN")
-    sub_import_crossref.add_argument('--batch-size',
+    sub_crossref.add_argument('--batch-size',
         help="size of batch to send",
         default=50, type=int)
 
-    sub_import_orcid = subparsers.add_parser('import-orcid')
-    sub_import_orcid.set_defaults(func=run_import_orcid)
-    sub_import_orcid.add_argument('json_file',
+    sub_orcid = subparsers.add_parser('orcid')
+    sub_orcid.set_defaults(func=run_orcid)
+    sub_orcid.add_argument('json_file',
         help="orcid JSON file to import from (or stdin)",
         default=sys.stdin, type=argparse.FileType('r'))
-    sub_import_orcid.add_argument('--batch-size',
+    sub_orcid.add_argument('--batch-size',
        help="size of batch to send",
        default=50, type=int)
 
-    sub_import_issn = subparsers.add_parser('import-issn')
-    sub_import_issn.set_defaults(func=run_import_issn)
-    sub_import_issn.add_argument('csv_file',
+    sub_issn = subparsers.add_parser('issn')
+    sub_issn.set_defaults(func=run_issn)
+    sub_issn.add_argument('csv_file',
         help="Journal ISSN CSV metadata file to import from (or stdin)",
         default=sys.stdin, type=argparse.FileType('r'))
-    sub_import_issn.add_argument('--batch-size',
+    sub_issn.add_argument('--batch-size',
         help="size of batch to send",
         default=50, type=int)
 
-    sub_import_matched = subparsers.add_parser('import-matched')
-    sub_import_matched.set_defaults(func=run_import_matched)
-    sub_import_matched.add_argument('json_file',
+    sub_matched = subparsers.add_parser('matched')
+    sub_matched.set_defaults(func=run_matched)
+    sub_matched.add_argument('json_file',
         help="JSON file to import from (or stdin)",
         default=sys.stdin, type=argparse.FileType('r'))
-    sub_import_matched.add_argument('--no-file-update',
+    sub_matched.add_argument('--no-file-update',
         action='store_true',
         help="don't lookup existing files, just insert (only for bootstrap)")
-    sub_import_matched.add_argument('--batch-size',
+    sub_matched.add_argument('--batch-size',
         help="size of batch to send",
         default=50, type=int)
 
-    sub_import_grobid_metadata = subparsers.add_parser('import-grobid-metadata')
-    sub_import_grobid_metadata.set_defaults(func=run_import_grobid_metadata)
-    sub_import_grobid_metadata.add_argument('tsv_file',
+    sub_grobid_metadata = subparsers.add_parser('grobid-metadata')
+    sub_grobid_metadata.set_defaults(func=run_grobid_metadata)
+    sub_grobid_metadata.add_argument('tsv_file',
         help="TSV file to import from (or stdin)",
         default=sys.stdin, type=argparse.FileType('r'))
-    sub_import_grobid_metadata.add_argument('--group-size',
+    sub_grobid_metadata.add_argument('--group-size',
         help="editgroup group size to use",
         default=75, type=int)
 
--
cgit v1.2.3
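
Note on the import change above: this view is limited to python/fatcat_import.py, so the new fatcat_tools/importers/__init__.py that lets "from fatcat_tools.importers import CrossrefImporter, ..." resolve is not shown here. Below is a minimal sketch of what such a package __init__.py could contain, assuming the renamed classes still live in the existing crossref, orcid, issn, matched, and grobid_metadata submodules; the actual file in the commit may differ.

    # fatcat_tools/importers/__init__.py -- hypothetical sketch, not part of this diff
    # Re-export the renamed importer classes at the package level so callers can
    # write "from fatcat_tools.importers import CrossrefImporter" instead of
    # importing from each submodule path directly.
    from .crossref import CrossrefImporter
    from .orcid import OrcidImporter
    from .issn import IssnImporter
    from .matched import MatchedImporter
    from .grobid_metadata import GrobidMetadataImporter

With the subparser renames, a typical invocation becomes "./fatcat_import.py crossref works.json ISSN-to-ISSN-L.txt extid_map.sqlite3 --batch-size 50" rather than the older "import-crossref" form (the input file names here are illustrative).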