author     Bryan Newbold <bnewbold@archive.org>  2018-08-24 18:07:11 -0700
committer  Bryan Newbold <bnewbold@archive.org>  2018-08-24 18:07:11 -0700
commit     b4c7b7fdc2537f4defb742e2b7b9de4524adf174 (patch)
tree       dbab9a3ac495c8353ac26c823f2ebdb17b6b65fe /scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala
parent     cb563e890a8544fef1367d7b0f556cda6f0daca0 (diff)
rename DumpUnGrobidedJob
Diffstat (limited to 'scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala')
-rw-r--r--  scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala | 67
1 file changed, 67 insertions(+), 0 deletions(-)
diff --git a/scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala b/scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala
new file mode 100644
index 0000000..7fd3ce0
--- /dev/null
+++ b/scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala
@@ -0,0 +1,67 @@
+package sandcrawler
+
+import java.util.Properties
+
+import cascading.property.AppProps
+import cascading.tuple.Fields
+import com.twitter.scalding._
+import com.twitter.scalding.typed.TDsl._
+import parallelai.spyglass.base.JobBase
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+import parallelai.spyglass.hbase.HBasePipeConversions
+import parallelai.spyglass.hbase.HBaseSource
+
+// Filters for HBase rows which have not had GROBID run on them, but do have
+// full CDX metadata, and dumps to a TSV for later extraction by the
+// "extraction-ungrobided" job.
+//
+// Does the same horrible join thing that the other dump jobs in this repo do.
+class DumpUnGrobidedJob(args: Args) extends JobBase(args) with HBasePipeConversions {
+
+ val output = args("output")
+
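+ // All rows with full file-level CDX metadata: the row key plus the "f:c",
+ // "file:mime", and "file:cdx" columns.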
+ val allKeys : TypedPipe[(String,String,String,String)] = DumpUnGrobidedJob.getHBaseKeySource(
+ args("hbase-table"),
+ args("zookeeper-hosts"))
+ .read
+ .fromBytesWritable('key, 'c, 'mime, 'cdx)
+ .toTypedPipe[(String,String,String,String)]('key, 'c, 'mime, 'cdx)
+
+ val existingKeys : TypedPipe[(String,Boolean)] = DumpUnGrobidedJob.getHBaseColSource(
+ args("hbase-table"),
+ args("zookeeper-hosts"))
+ .read
+ .fromBytesWritable('key)
+ .toTypedPipe[String]('key)
+ .map{ key => (key, true) }
+
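+ // Anti-join: group both pipes by row key, left-join them, and keep only the
+ // rows whose right side is None, i.e. those with no GROBID status yet.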
+ val missingKeys : TypedPipe[(String,String,String,String)] = allKeys
+ .groupBy(_._1)
+ .leftJoin(existingKeys.groupBy(_._1))
+ .toTypedPipe
+ .collect { case (key, ((_, c, mime, cdx), None)) => (key, c, mime, cdx) }
+
+ missingKeys
+ .write(TypedTsv[(String,String,String,String)](output))
+
+}
+
+object DumpUnGrobidedJob {
+
+ // eg, "wbgrp-journal-extract-0-qa", "mtrcs-zk1.us.archive.org:2181"
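+ // Any row that already has a grobid0:status_code has been through GROBID
+ // (whether or not extraction succeeded).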
+ def getHBaseColSource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = {
+ HBaseBuilder.build(
+ hbaseTable,
+ zookeeperHosts,
+ List("grobid0:status_code"),
+ SourceMode.SCAN_ALL)
+ }
+
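+ // Candidate rows for extraction: anything with file-level CDX metadata.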
+ def getHBaseKeySource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = {
+ HBaseBuilder.build(
+ hbaseTable,
+ zookeeperHosts,
+ List("f:c", "file:mime", "file:cdx"),
+ SourceMode.SCAN_ALL)
+ }
+}
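
The "horrible join" above is just an anti-join expressed through Scalding's
typed API. A minimal sketch of the same pattern on small in-memory pipes
(hypothetical keys and values; assumes scalding-core on the classpath, e.g.
in the Scalding REPL or a test harness):

    import com.twitter.scalding.typed.TypedPipe

    // Rows with CDX metadata, keyed by SHA-1; values are made up for the demo.
    val all: TypedPipe[(String, String, String, String)] = TypedPipe.from(List(
      ("sha1:AAAA", "c1", "application/pdf", "cdx1"),
      ("sha1:BBBB", "c2", "application/pdf", "cdx2")))

    // Keys that already have a GROBID status code.
    val done: TypedPipe[(String, Boolean)] = TypedPipe.from(List(("sha1:AAAA", true)))

    // Left join on the key; unmatched rows come back with None and are the
    // only ones kept, mirroring missingKeys above.
    val missing = all
      .groupBy(_._1)
      .leftJoin(done.groupBy(_._1))
      .toTypedPipe
      .collect { case (key, ((_, c, mime, cdx), None)) => (key, c, mime, cdx) }
    // missing contains only the "sha1:BBBB" row

The job itself is driven entirely by its arguments: --hbase-table and
--zookeeper-hosts select the scan source, and --output names the TSV that the
surviving four-tuples are written to.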