diff options
| author | Bryan Newbold <bnewbold@archive.org> | 2018-08-27 14:32:19 -0700 | 
|---|---|---|
| committer | Bryan Newbold <bnewbold@archive.org> | 2018-08-27 14:32:19 -0700 | 
| commit | a7156b06340460e0e70a19891e161b8b8f4f2078 (patch) | |
| tree | 029a04f6558b99f060e845ab1ef00baa6cd39600 /scalding/src/main/scala | |
| parent | 4c374c647d8fecce827cabcb579e5aae20f198db (diff) | |
| parent | d2b4da4c55a24468a0cbfdc9f567449d4e913331 (diff) | |
| download | sandcrawler-a7156b06340460e0e70a19891e161b8b8f4f2078.tar.gz sandcrawler-a7156b06340460e0e70a19891e161b8b8f4f2078.zip | |
Merge branch 'bnewbold-ungrobided'
Diffstat (limited to 'scalding/src/main/scala')
| -rw-r--r-- | scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala | 67 | 
1 file changed, 67 insertions, 0 deletions
package sandcrawler

import java.util.Properties

import cascading.property.AppProps
import cascading.tuple.Fields
import com.twitter.scalding._
import com.twitter.scalding.typed.TDsl._
import parallelai.spyglass.base.JobBase
import parallelai.spyglass.hbase.HBaseConstants.SourceMode
import parallelai.spyglass.hbase.HBasePipeConversions
import parallelai.spyglass.hbase.HBaseSource

// Filters for HBase rows which have not had GROBID run on them, but do have
// full CDX metadata, and dumps to a TSV for later extraction by the
// "extraction-ungrobided" job.
//
// NOTE(review): the original comment said "Does the same horrible join thing
// that DumpUnGrobidedJob does", which is self-referential (this class IS
// DumpUnGrobidedJob); it presumably meant a sibling dump job -- confirm which.
class DumpUnGrobidedJob(args: Args) extends JobBase(args) with HBasePipeConversions {

  // Output path for the TSV dump, taken from the job arguments.
  val output = args("output")

  // Every row that has full CDX metadata, as (key, f:c, file:mime, file:cdx).
  val allKeys : TypedPipe[(String,String,String,String)] = DumpUnGrobidedJob.getHBaseKeySource(
    args("hbase-table"),
    args("zookeeper-hosts"))
    .read
    .fromBytesWritable('key, 'c, 'mime, 'cdx)
    .toTypedPipe[(String,String,String,String)]('key, 'c, 'mime, 'cdx)

  // Keys of rows that already have a grobid0:status_code column (i.e. GROBID
  // has already run on them). Tagged with a dummy `true` so the left join
  // below has a concrete right-hand value type.
  val existingKeys : TypedPipe[(String,Boolean)] = DumpUnGrobidedJob.getHBaseColSource(
    args("hbase-table"),
    args("zookeeper-hosts"))
    .read
    .fromBytesWritable('key)
    .toTypedPipe[String]('key)
    .map{ key => (key, true) }

  // Left join on row key, then keep only rows with no match on the right
  // side (`None`) -- i.e. rows GROBID has NOT yet processed.
  val missingKeys : TypedPipe[(String,String,String,String)] = allKeys
    .groupBy(_._1)
    .leftJoin(existingKeys.groupBy(_._1))
    .toTypedPipe
    .collect { case (key, ((_, c, mime, cdx), None)) => (key, c, mime, cdx) }

  missingKeys
    .write(TypedTsv[(String,String,String,String)](output))

}

object DumpUnGrobidedJob {

  // eg, "wbgrp-journal-extract-0-qa", "mtrcs-zk1.us.archive.org:2181"
  // Source over the grobid0:status_code column, used to detect rows GROBID
  // has already processed.
  def getHBaseColSource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = {
    HBaseBuilder.build(
      hbaseTable,
      zookeeperHosts,
      List("grobid0:status_code"),
      SourceMode.SCAN_ALL)
  }

  // Source over the CDX metadata columns (f:c, file:mime, file:cdx) that the
  // dump output is built from.
  def getHBaseKeySource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = {
    HBaseBuilder.build(
      hbaseTable,
      zookeeperHosts,
      List("f:c", "file:mime", "file:cdx"),
      SourceMode.SCAN_ALL)
  }
}
