From aafec7a8307342479cacfc5aa6a794068b799823 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Mon, 21 May 2018 19:18:52 -0700
Subject: another attempt at a simple job variation

---
 .../src/main/scala/sandcrawler/HBaseRowCountJob.scala | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/scald-mvp/src/main/scala/sandcrawler/HBaseRowCountJob.scala b/scald-mvp/src/main/scala/sandcrawler/HBaseRowCountJob.scala
index 23c4764..162f729 100644
--- a/scald-mvp/src/main/scala/sandcrawler/HBaseRowCountJob.scala
+++ b/scald-mvp/src/main/scala/sandcrawler/HBaseRowCountJob.scala
@@ -1,11 +1,16 @@
 package sandcrawler
 
 import com.twitter.scalding._
+import parallelai.spyglass.base.JobBase
 import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions, HBaseConstants}
 import parallelai.spyglass.hbase.HBaseConstants.SourceMode
 import cascading.tuple.Fields
+import cascading.property.AppProps
+import java.util.Properties
+
+
+class HBaseRowCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {
 
-class HBaseRowCountJob(args: Args) extends Job(args) with HBasePipeConversions {
 
   // For now doesn't actually count, just dumps a "word count"
 
@@ -14,11 +19,19 @@ class HBaseRowCountJob(args: Args) extends Job(args) with HBasePipeConversions {
   val hbs = new HBaseSource(
     "wbgrp-journal-extract-0-qa",    // HBase Table Name
     "mtrcs-zk1.us.archive.org:2181", // HBase Zookeeper server (to get runtime config info; can be array?)
-    new Fields("key"),
-    List("column_family"),
+    new Fields("key"),
+    sourceMode = SourceMode.GET_LIST, keyList = List("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", "sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU"))
+    .read
+    .debug
+    .fromBytesWritable(new Fields("key"))
+    .write(Tsv(output format "get_list"))
+
+    /*
+    List("column_family"),
     sourceMode = SourceMode.SCAN_ALL)
     .read
     .debug
     .fromBytesWritable(new Fields("key"))
     .write(Tsv(output format "get_list"))
+    */
 }
--
cgit v1.2.3
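
Note on the patch above: the in-file comment says the job "for now doesn't
actually count, just dumps a 'word count'", and this commit switches the
source to a GET_LIST probe of two known SHA-1 row keys (handy for
smoke-testing HBase connectivity) while commenting out the SCAN_ALL variant.
As a minimal sketch, not part of this commit, an actual row count over the
same table could reuse the commented-out SCAN_ALL arguments plus Scalding's
fields-API groupAll/size; the class name HBaseRowCountSketchJob, the 'count
field, and the val output = args("output") binding are illustrative
assumptions, not taken from the commit:

    package sandcrawler

    import com.twitter.scalding._
    import cascading.tuple.Fields
    import parallelai.spyglass.base.JobBase
    import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions}
    import parallelai.spyglass.hbase.HBaseConstants.SourceMode

    // Hypothetical sketch, not part of this commit: scan the whole table
    // and emit a single total row count instead of dumping individual keys.
    class HBaseRowCountSketchJob(args: Args) extends JobBase(args) with HBasePipeConversions {

      val output = args("output") // assumed to be defined this way in the full file

      new HBaseSource(
        "wbgrp-journal-extract-0-qa",    // same table as in the patch
        "mtrcs-zk1.us.archive.org:2181", // same zookeeper quorum as in the patch
        new Fields("key"),
        List("column_family"),
        sourceMode = SourceMode.SCAN_ALL)
        .read
        .fromBytesWritable(new Fields("key"))
        // groupAll sends every tuple to one reducer; size('count) emits the total
        .groupAll { _.size('count) }
        .write(Tsv(output))
    }

groupAll funnels everything through a single reducer, which is acceptable
when the output reduces to one row but would be a bottleneck for any larger
aggregation; GET_LIST with an explicit keyList, as this commit uses, stays
cheap because it fetches only the named rows.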