From 7c49fecd01cbd89bb5987442b89a4aafc1186ff9 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 20 Aug 2018 18:50:07 -0700 Subject: make col counter generic --- .../scala/sandcrawler/GrobidMetadataCountJob.scala | 36 --------------------- .../main/scala/sandcrawler/HBaseColCountJob.scala | 37 ++++++++++++++++++++++ 2 files changed, 37 insertions(+), 36 deletions(-) delete mode 100644 scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala create mode 100644 scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala (limited to 'scalding') diff --git a/scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala b/scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala deleted file mode 100644 index 08f3340..0000000 --- a/scalding/src/main/scala/sandcrawler/GrobidMetadataCountJob.scala +++ /dev/null @@ -1,36 +0,0 @@ -package sandcrawler - -import java.util.Properties - -import cascading.property.AppProps -import cascading.tuple.Fields -import com.twitter.scalding._ -import parallelai.spyglass.base.JobBase -import parallelai.spyglass.hbase.HBaseConstants.SourceMode -import parallelai.spyglass.hbase.HBasePipeConversions -import parallelai.spyglass.hbase.HBaseSource - -class GrobidMetadataCountJob(args: Args) extends JobBase(args) with HBasePipeConversions { - - val output = args("output") - - GrobidMetadataCountJob.getHBaseSource( - args("hbase-table"), - args("zookeeper-hosts")) - .read - .debug - .groupAll { _.size('count) } - .write(Tsv(output)) -} - -object GrobidMetadataCountJob { - - // eg, "wbgrp-journal-extract-0-qa",7 "mtrcs-zk1.us.archive.org:2181" - def getHBaseSource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = { - HBaseBuilder.build( - hbaseTable, - zookeeperHosts, - List("grobid0:metadata"), - SourceMode.SCAN_ALL) - } -} diff --git a/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala new file mode 100644 index 0000000..a007339 --- /dev/null +++ 
b/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala @@ -0,0 +1,37 @@ +package sandcrawler + +import java.util.Properties + +import cascading.property.AppProps +import cascading.tuple.Fields +import com.twitter.scalding._ +import parallelai.spyglass.base.JobBase +import parallelai.spyglass.hbase.HBaseConstants.SourceMode +import parallelai.spyglass.hbase.HBasePipeConversions +import parallelai.spyglass.hbase.HBaseSource + +class HBaseColCountJob(args: Args) extends JobBase(args) with HBasePipeConversions { + + val output = args("output") + + HBaseColCountJob.getHBaseSource( + args("hbase-table"), + args("zookeeper-hosts"), + args("column")) + .read + .debug + .groupAll { _.size('count) } + .write(Tsv(output)) +} + +object HBaseColCountJob { + + // eg, "wbgrp-journal-extract-0-qa",7 "mtrcs-zk1.us.archive.org:2181" + def getHBaseSource(hbaseTable: String, zookeeperHosts: String, col: String) : HBaseSource = { + HBaseBuilder.build( + hbaseTable, + zookeeperHosts, + List(col), + SourceMode.SCAN_ALL) + } +} -- cgit v1.2.3