author    Bryan Newbold <bnewbold@archive.org>  2018-09-22 20:31:39 -0700
committer Bryan Newbold <bnewbold@archive.org>  2018-09-22 20:31:39 -0700
commit    51ca75189b7c36577f8e80b9db1f66259f0f6178 (patch)
tree      bada79ed1f16510070e5c354bfb2bf048fdd1500 /scalding
parent    f051aa917b7f4ff9ae20ae77f8b84ae27c48233c (diff)
new DumpGrobidMetaInsertableJob
Diffstat (limited to 'scalding')
-rw-r--r--  scalding/src/main/scala/sandcrawler/DumpGrobidMetaInsertableJob.scala | 38
1 file changed, 38 insertions(+), 0 deletions(-)
diff --git a/scalding/src/main/scala/sandcrawler/DumpGrobidMetaInsertableJob.scala b/scalding/src/main/scala/sandcrawler/DumpGrobidMetaInsertableJob.scala
new file mode 100644
index 0000000..ee2b7c2
--- /dev/null
+++ b/scalding/src/main/scala/sandcrawler/DumpGrobidMetaInsertableJob.scala
@@ -0,0 +1,38 @@
+package sandcrawler
+
+import java.util.Properties
+
+import cascading.property.AppProps
+import cascading.tuple.Fields
+import com.twitter.scalding._
+import com.twitter.scalding.typed.TDsl._
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable
+import org.apache.hadoop.hbase.util.Bytes
+import parallelai.spyglass.base.JobBase
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+import parallelai.spyglass.hbase.HBasePipeConversions
+import parallelai.spyglass.hbase.HBaseSource
+
+// Dumps the SHA1 key and grobid0:metadata columns, plus file metadata needed
+// to insert into fatcat. Used, e.g., as part of the long-tail Mellon pipeline.
+class DumpGrobidMetaInsertableJob(args: Args) extends JobBase(args) with HBasePipeConversions {
+
+  val metaPipe: TypedPipe[(String, String, String, Long, String)] = HBaseBuilder.build(args("hbase-table"),
+ args("zookeeper-hosts"),
+ List("file:cdx", "file:mime", "file:size", "grobid0:metadata"),
+ SourceMode.SCAN_ALL)
+ .read
+    .toTypedPipe[(ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable)](new Fields("key", "cdx", "mime", "size", "metadata"))
+ .filter { case (_, cdx, mime, size, metadata) => cdx != null && mime != null && size != null && metadata != null }
+ .map { case (key, cdx, mime, size, metadata) =>
+ (Bytes.toString(key.copyBytes()),
+ Bytes.toString(cdx.copyBytes()),
+ Bytes.toString(mime.copyBytes()),
+ Bytes.toLong(size.copyBytes()),
+ Bytes.toString(metadata.copyBytes())
+ )
+    }
+
+  metaPipe.write(TypedTsv[(String, String, String, Long, String)](args("output")))
+
+}
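For context, a minimal sketch (not part of this commit) of how a downstream consumer might read one row of the five-column TSV this job writes. The MetaRow case class and the sample values are hypothetical, and the sketch assumes the JSON cells contain no literal tab characters, since TypedTsv does not escape delimiters inside fields:

// Hypothetical sketch: parse one row emitted by DumpGrobidMetaInsertableJob.
// Column order matches the job's output tuple:
// (SHA1 key, file:cdx JSON, file:mime, file:size, grobid0:metadata JSON).
object ParseMetaRow {
  case class MetaRow(sha1: String, cdx: String, mime: String, size: Long, metadata: String)

  def parse(line: String): Option[MetaRow] =
    line.split('\t') match {
      // Expect exactly five tab-separated fields, with a numeric size column.
      case Array(sha1, cdx, mime, size, metadata) =>
        scala.util.Try(size.toLong).toOption
          .map(MetaRow(sha1, cdx, mime, _, metadata))
      case _ => None
    }

  def main(args: Array[String]): Unit = {
    // Made-up sample row, for illustration only.
    val sample = Seq(
      "sha1:0000000000000000000000000000000000000000",
      "{\"surt\": \"example,org)/paper.pdf\"}",
      "application/pdf",
      "12345",
      "{\"title\": \"Example\"}"
    ).mkString("\t")
    println(parse(sample))
  }
}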