From 94912e739c51d2fa4d5f9de878d0b0f0544a4459 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Wed, 12 Feb 2020 19:40:55 -0800
Subject: pdftrio basic python code

This is basically just a copy/paste of GROBID code, only simpler!
---
 python/pdftrio_tool.py | 118 +++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 118 insertions(+)
 create mode 100755 python/pdftrio_tool.py
 (limited to 'python/pdftrio_tool.py')

diff --git a/python/pdftrio_tool.py b/python/pdftrio_tool.py
new file mode 100755
index 0000000..843c214
--- /dev/null
+++ b/python/pdftrio_tool.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python3
+
+"""
+Basically just a copy of grobid_tool.py, but for PDF classification instead of
+text extraction.
+
+Example of large parallel run, locally:
+
+    cat /srv/sandcrawler/tasks/something.cdx | pv -l | parallel -j30 --pipe ./pdftrio_tool.py --kafka-env prod --kafka-hosts wbgrp-svc263.us.archive.org:9092,wbgrp-svc284.us.archive.org:9092,wbgrp-svc285.us.archive.org:9092 --kafka-mode --pdftrio-host http://localhost:3939 -j0 classify-pdf-json -
+"""
+
+import sys
+import json
+import argparse
+import datetime
+
+from sandcrawler import *
+
+
+def run_classify_pdf_json(args):
+    pdftrio_client = PdfTrioClient(host_url=args.pdftrio_host)
+    wayback_client = WaybackClient()
+    if args.jobs > 1:
+        worker = PdfTrioWorker(pdftrio_client, wayback_client, sink=None)
+        multi_worker = MultiprocessWrapper(worker, args.sink)
+        pusher = JsonLinePusher(multi_worker, args.json_file, batch_size=args.jobs)
+    else:
+        worker = PdfTrioWorker(pdftrio_client, wayback_client, sink=args.sink)
+        pusher = JsonLinePusher(worker, args.json_file)
+    pusher.run()
+
+def run_classify_pdf_cdx(args):
+    pdftrio_client = PdfTrioClient(host_url=args.pdftrio_host)
+    wayback_client = WaybackClient()
+    if args.jobs > 1:
+        worker = PdfTrioWorker(pdftrio_client, wayback_client, sink=None)
+        multi_worker = MultiprocessWrapper(worker, args.sink)
+        pusher = CdxLinePusher(
+            multi_worker,
+            args.cdx_file,
+            filter_http_statuses=[200, 226],
+            filter_mimetypes=['application/pdf'],
+            batch_size=args.jobs,
+        )
+    else:
+        worker = PdfTrioWorker(pdftrio_client, wayback_client, sink=args.sink)
+        pusher = CdxLinePusher(
+            worker,
+            args.cdx_file,
+            filter_http_statuses=[200, 226],
+            filter_mimetypes=['application/pdf'],
+        )
+    pusher.run()
+
+def run_classify_pdf_zipfile(args):
+    pdftrio_client = PdfTrioClient(host_url=args.pdftrio_host)
+    worker = PdfTrioBlobWorker(pdftrio_client, sink=args.sink)
+    pusher = ZipfilePusher(worker, args.zip_file)
+    pusher.run()
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('--kafka-mode',
+        action='store_true',
+        help="send output to Kafka (not stdout)")
+    parser.add_argument('--kafka-hosts',
+        default="localhost:9092",
+        help="list of Kafka brokers (host/port) to use")
+    parser.add_argument('--kafka-env',
+        default="dev",
+        help="Kafka topic namespace to use (eg, prod, qa, dev)")
+    parser.add_argument('-j', '--jobs',
+        default=8, type=int,
+        help="parallelism for batch CPU jobs")
+    parser.add_argument('--pdftrio-host',
+        default="http://pdftrio.qa.fatcat.wiki",
+        help="pdftrio API host/port")
+    subparsers = parser.add_subparsers()
+
+    sub_classify_pdf_json = subparsers.add_parser('classify-pdf-json',
+        help="for each JSON line with CDX info, fetches PDF and does pdftrio classification")
+    sub_classify_pdf_json.set_defaults(func=run_classify_pdf_json)
+    sub_classify_pdf_json.add_argument('json_file',
+        help="JSON file to import from (or '-' for stdin)",
+        type=argparse.FileType('r'))
+
+    sub_classify_pdf_cdx = subparsers.add_parser('classify-pdf-cdx',
+        help="for each CDX line, fetches PDF and does pdftrio classification")
+    sub_classify_pdf_cdx.set_defaults(func=run_classify_pdf_cdx)
+    sub_classify_pdf_cdx.add_argument('cdx_file',
+        help="CDX file to import from (or '-' for stdin)",
+        type=argparse.FileType('r'))
+
+    sub_classify_pdf_zipfile = subparsers.add_parser('classify-pdf-zipfile',
+        help="opens zipfile, iterates over PDF files inside and does pdftrio classification for each")
+    sub_classify_pdf_zipfile.set_defaults(func=run_classify_pdf_zipfile)
+    sub_classify_pdf_zipfile.add_argument('zip_file',
+        help="zipfile with PDFs to classify",
+        type=str)
+
+    args = parser.parse_args()
+    if not args.__dict__.get("func"):
+        print("tell me what to do!")
+        sys.exit(-1)
+
+    args.sink = None
+    if args.kafka_mode:
+        produce_topic = "sandcrawler-{}.pdftrio-output".format(args.kafka_env)
+        print("Running in kafka output mode, publishing to {}\n".format(produce_topic))
+        args.sink = KafkaSink(kafka_hosts=args.kafka_hosts,
+            produce_topic=produce_topic)
+
+    args.func(args)
+
+if __name__ == '__main__':
+    main()
-- 
cgit v1.2.3