| author | Bryan Newbold <bnewbold@archive.org> | 2018-07-17 12:07:10 -0700 |
|---|---|---|
| committer | Bryan Newbold <bnewbold@archive.org> | 2018-07-17 12:21:13 -0700 |
| commit | a478253b8fff5efe1783d6af9c3c65e8dc5c9045 (patch) | |
| tree | 18263cf19a88a01a3e9bef79c6587099a92d4e36 /scalding/src/main | |
| parent | 746870a10215549c25a16529eabaeb199a3b9228 (diff) | |
| download | sandcrawler-a478253b8fff5efe1783d6af9c3c65e8dc5c9045.tar.gz sandcrawler-a478253b8fff5efe1783d6af9c3c65e8dc5c9045.zip | |
refactor HBaseStatusCountJob to convert Long column
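The change decodes the grobid0:status_code column from its raw HBase byte representation into a Long before counting. As a minimal illustration (not part of the commit; the object name is hypothetical), the round-trip below uses the same HBase Bytes helpers and ImmutableBytesWritable.copyBytes() call the job relies on:

```scala
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.util.Bytes

object LongColumnSketch {
  def main(args: Array[String]): Unit = {
    // HBase serializes a Long as an 8-byte big-endian array; a GROBID status
    // code of 200 round-trips through the same helpers the job uses.
    val cell = new ImmutableBytesWritable(Bytes.toBytes(200L))
    // copyBytes() materializes exactly the cell's backing bytes before decoding
    val statusCode: Long = Bytes.toLong(cell.copyBytes())
    println(statusCode) // prints 200
  }
}
```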
Diffstat (limited to 'scalding/src/main')
-rw-r--r-- | scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala | 30 |
1 file changed, 29 insertions(+), 1 deletion(-)
```diff
diff --git a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
index aabf9f8..375d155 100644
--- a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
+++ b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
@@ -1,5 +1,33 @@
 package sandcrawler
 
+import cascading.property.AppProps
+import cascading.tuple.Fields
+import com.twitter.scalding._
+import com.twitter.scalding.typed.TDsl._
+import java.util.Properties
+import parallelai.spyglass.base.JobBase
+import org.apache.hadoop.hbase.util.Bytes
+import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions}
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
 import com.twitter.scalding.Args
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable
 
-class HBaseStatusCountJob(args: Args) extends HBaseCountJob(args, "grobid0:status_code")
+class HBaseStatusCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {
+
+  val colSpec = "grobid0:status_code"
+  val output = args("output")
+  HBaseBuilder.parseColSpec(colSpec)
+  val Col: String = colSpec.split(":")(1)
+
+  val source : TypedPipe[Long] = HBaseCountJob.getHBaseSource(args("hbase-table"),
+    args("zookeeper-hosts"),
+    colSpec)
+    .read
+    .toTypedPipe[(ImmutableBytesWritable,ImmutableBytesWritable)]('key, 'status_code)
+    .map { case (key, raw_code) => Bytes.toLong(raw_code.copyBytes()) }
+
+  source.groupBy { identity }
+    .size
+    .debug
+    .write(TypedTsv[(Long,Long)](output))
+}
```
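For context, the aggregation at the end of the new job is the typed-API equivalent of a word count: group each decoded status code by its own value, then take the group size. Below is a standalone sketch of that pattern without the HBase source; the StatusCodeCountSketchJob name and TextLine input are illustrative, not part of the repository.

```scala
package sandcrawler

import com.twitter.scalding._

// Illustrative only: same count-by-value pattern as HBaseStatusCountJob,
// but reading newline-delimited status codes from a plain text file.
class StatusCodeCountSketchJob(args: Args) extends Job(args) {
  TypedPipe.from(TextLine(args("input")))
    .map { line => line.trim.toLong }   // one status code per line
    .groupBy { identity }               // key each code by its own value
    .size                               // occurrences per distinct code
    .write(TypedTsv[(Long, Long)](args("output")))
}
```

Such a job would be run like any other Scalding job, e.g. through com.twitter.scalding.Tool with --local or --hdfs plus the --input and --output arguments used above.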