aboutsummaryrefslogtreecommitdiffstats
path: root/scald-mvp/src/main/scala/sandcrawler/HBaseRowCountJob.scala
blob: 162f729c6ac96b6a2d861126906ec4a51fe9076a (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
package sandcrawler

import com.twitter.scalding._
import parallelai.spyglass.base.JobBase
import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions, HBaseConstants}
import parallelai.spyglass.hbase.HBaseConstants.SourceMode
import cascading.tuple.Fields
import cascading.property.AppProps
import java.util.Properties


/**
 * Scalding job that reads a small fixed set of rows from an HBase table and
 * dumps their row keys to a TSV file.
 *
 * Despite the name, this job does not yet count rows (see note below); it is
 * a connectivity/smoke-test for the SpyGlass HBase tap.
 *
 * Run with: --output <path>  (path for the TSV output)
 */
class HBaseRowCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {


  // For now doesn't actually count, just dumps a "word count"

  // Destination path for the TSV dump, taken from the required --output arg.
  // NOTE(review): this value is later used as a format string
  // (`output format "get_list"`), which only substitutes anything if the
  // path contains a "%s" placeholder — confirm callers pass one, otherwise
  // the "get_list" suffix is silently dropped.
  val output = args("output")

  // Pipeline: fetch two specific rows (by sha1-prefixed key) from the QA
  // journal-extract table in GET_LIST mode, log each tuple via .debug,
  // convert the key field out of its bytes-writable form, and write the
  // keys as TSV to the output path.
  val hbs = new HBaseSource(
    "wbgrp-journal-extract-0-qa",     // HBase Table Name
    "mtrcs-zk1.us.archive.org:2181",  // HBase Zookeeper server (to get runtime config info; can be array?)
    new Fields("key"),                // only the row key column is requested
    sourceMode = SourceMode.GET_LIST, keyList = List("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", "sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU"))
    .read
    .debug                            // prints every tuple to task logs; remove for production-sized runs
    .fromBytesWritable(new Fields("key"))  // HBasePipeConversions helper: bytes-writable key -> string
    .write(Tsv(output format "get_list"))

    // Alternative pipeline kept for reference: full-table SCAN_ALL over a
    // column family instead of a fixed key list. Dead code — not compiled.
    /*
    List("column_family"),
    sourceMode = SourceMode.SCAN_ALL)
    .read
    .debug
    .fromBytesWritable(new Fields("key"))
    .write(Tsv(output format "get_list"))
    */
}