author    Bryan Newbold <bnewbold@archive.org>  2018-07-17 13:48:28 -0700
committer Bryan Newbold <bnewbold@archive.org>  2018-07-17 13:48:30 -0700
commit    d0ed197859dfcadf89f5321939bb5e83e1bee9ed (patch)
tree      0537864c01cc75bd62d23b9d64389ea060592693 /scalding/src
parent    a478253b8fff5efe1783d6af9c3c65e8dc5c9045 (diff)
download  sandcrawler-d0ed197859dfcadf89f5321939bb5e83e1bee9ed.tar.gz
          sandcrawler-d0ed197859dfcadf89f5321939bb5e83e1bee9ed.zip
lint/cleanup fixes from review
Thanks Ellen!
Diffstat (limited to 'scalding/src')
-rw-r--r--  scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala | 29
1 file changed, 12 insertions(+), 17 deletions(-)
diff --git a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
index 375d155..dbd444d 100644
--- a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
+++ b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
@@ -1,33 +1,28 @@
package sandcrawler
-import cascading.property.AppProps
-import cascading.tuple.Fields
+
+import com.twitter.scalding.Args
import com.twitter.scalding._
import com.twitter.scalding.typed.TDsl._
-import java.util.Properties
-import parallelai.spyglass.base.JobBase
-import org.apache.hadoop.hbase.util.Bytes
-import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions}
-import parallelai.spyglass.hbase.HBaseConstants.SourceMode
-import com.twitter.scalding.Args
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
+import org.apache.hadoop.hbase.util.Bytes
+import parallelai.spyglass.base.JobBase
+import parallelai.spyglass.hbase.HBasePipeConversions
+
class HBaseStatusCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {
- val colSpec = "grobid0:status_code"
- val output = args("output")
- HBaseBuilder.parseColSpec(colSpec)
- val Col: String = colSpec.split(":")(1)
+ val source = HBaseCountJob.getHBaseSource(args("hbase-table"),
+ args("zookeeper-hosts"),
+ "grobid0:status_code")
- val source : TypedPipe[Long] = HBaseCountJob.getHBaseSource(args("hbase-table"),
- args("zookeeper-hosts"),
- colSpec)
+ val statusPipe : TypedPipe[Long] = source
.read
.toTypedPipe[(ImmutableBytesWritable,ImmutableBytesWritable)]('key, 'status_code)
.map { case (key, raw_code) => Bytes.toLong(raw_code.copyBytes()) }
- source.groupBy { identity }
+ statusPipe.groupBy { identity }
.size
.debug
- .write(TypedTsv[(Long,Long)](output))
+ .write(TypedTsv[(Long,Long)](args("output")))
}
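
For reference, this is roughly how HBaseStatusCountJob.scala reads after the patch, reconstructed from the hunk above with descriptive comments added. Blank-line placement is approximate, and the HBaseCountJob.getHBaseSource helper is assumed to be defined elsewhere in the sandcrawler scalding code, as the diff implies.

package sandcrawler

import com.twitter.scalding.Args
import com.twitter.scalding._
import com.twitter.scalding.typed.TDsl._
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.util.Bytes
import parallelai.spyglass.base.JobBase
import parallelai.spyglass.hbase.HBasePipeConversions

class HBaseStatusCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {

  // HBase source over the grobid0:status_code column, configured from CLI args
  val source = HBaseCountJob.getHBaseSource(args("hbase-table"),
    args("zookeeper-hosts"),
    "grobid0:status_code")

  // Decode each raw status_code cell into a Long
  val statusPipe : TypedPipe[Long] = source
    .read
    .toTypedPipe[(ImmutableBytesWritable,ImmutableBytesWritable)]('key, 'status_code)
    .map { case (key, raw_code) => Bytes.toLong(raw_code.copyBytes()) }

  // Count rows per distinct status code and write (code, count) pairs as TSV
  statusPipe.groupBy { identity }
    .size
    .debug
    .write(TypedTsv[(Long,Long)](args("output")))
}

A job of this shape would typically be launched through Scalding's Tool runner, passing --hbase-table, --zookeeper-hosts, and --output arguments to satisfy the args(...) lookups above.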