please (mode -rwxr-xr-x) | 24 ++++++++++++++++++++++++
1 file changed, 24 insertions(+), 0 deletions(-)
@@ -144,6 +144,27 @@ def run_matchcrossref(args):
         crossref_input=args.crossref_input)
     subprocess.call(cmd, shell=True)
 
+def run_grobidscorabledump(args):
+    if args.rebuild:
+        rebuild_scalding()
+    print("Starting grobid-scorable-dump job...")
+    output = "{}/output-{}/{}-grobidscorabledump".format(
+        HDFS_DIR,
+        args.env,
+        datetime.strftime(datetime.now(), "%Y-%m-%d-%H%M.%S"))
+    cmd = """hadoop jar \
+        scalding/target/scala-2.11/sandcrawler-assembly-0.2.0-SNAPSHOT.jar \
+        com.twitter.scalding.Tool sandcrawler.GrobidScorableDumpJob \
+        --hdfs \
+        --app.conf.path scalding/ia_cluster.conf \
+        --hbase-table wbgrp-journal-extract-0-{env} \
+        --zookeeper-hosts {zookeeper_hosts} \
+        --output {output}""".format(
+            output=output,
+            zookeeper_hosts=ZOOKEEPER_HOSTS,
+            env=args.env)
+    subprocess.call(cmd, shell=True)
+
 def main():
     parser = argparse.ArgumentParser()
 
@@ -182,6 +203,9 @@ def main():
         help="number of reducers to run",
        type=int,
        default=30)
 
+    sub_grobidscorabledump = subparsers.add_parser('grobid-scorable-dump')
+    sub_grobidscorabledump.set_defaults(func=run_grobidscorabledump)
+
     args = parser.parse_args()
     if not args.__dict__.get("func"):
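For reference, a minimal usage sketch of the new subcommand, assuming the script is invoked directly as ./please and that --env and --rebuild are existing top-level flags (implied by the references to args.env and args.rebuild above, but not defined in this hunk):

    # hypothetical invocation; the --env/--rebuild flag names and the "qa" value are assumed, not shown in this diff
    ./please --env qa --rebuild grobid-scorable-dump

The new subparser only sets func=run_grobidscorabledump, so with argparse any shared options must be given before the subcommand name, as in the sketch above.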