From 9514b6a3620a98e4fc069ca31b77eac6f9c98bec Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 20 Aug 2018 18:45:21 -0700 Subject: add dedicated job for counting GrobidMetadata column --- .../scala/sandcrawler/GrobidMetadataCountJob.scala | 36 ++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala (limited to 'scalding/src/main') diff --git a/scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala b/scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala new file mode 100644 index 0000000..08f3340 --- /dev/null +++ b/scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala @@ -0,0 +1,36 @@ +package sandcrawler + +import java.util.Properties + +import cascading.property.AppProps +import cascading.tuple.Fields +import com.twitter.scalding._ +import parallelai.spyglass.base.JobBase +import parallelai.spyglass.hbase.HBaseConstants.SourceMode +import parallelai.spyglass.hbase.HBasePipeConversions +import parallelai.spyglass.hbase.HBaseSource + +class GrobidMetadataCountJob(args: Args) extends JobBase(args) with HBasePipeConversions { + + val output = args("output") + + GrobidMetadataCountJob.getHBaseSource( + args("hbase-table"), + args("zookeeper-hosts")) + .read + .debug + .groupAll { _.size('count) } + .write(Tsv(output)) +} + +object GrobidMetadataCountJob { + + // eg, "wbgrp-journal-extract-0-qa", "mtrcs-zk1.us.archive.org:2181" + def getHBaseSource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = { + HBaseBuilder.build( + hbaseTable, + zookeeperHosts, + List("grobid0:metadata"), + SourceMode.SCAN_ALL) + } +} -- cgit v1.2.3