diff options
author | Bryan Newbold <bnewbold@archive.org> | 2018-09-13 21:11:57 -0700 |
---|---|---|
committer | Bryan Newbold <bnewbold@archive.org> | 2018-09-13 21:11:57 -0700 |
commit | 5521b1b520a550373369da8b9cbd36148e071115 (patch) | |
tree | 058c20660cb6b1c2ac0a8c23070681092e6fbc7a /scalding/src/main | |
parent | 6681ee7d699fc481b3dc0e1e8f905395a0b42a3b (diff) | |
download | sandcrawler-5521b1b520a550373369da8b9cbd36148e071115.tar.gz sandcrawler-5521b1b520a550373369da8b9cbd36148e071115.zip |
new simple file metadata dump script
Diffstat (limited to 'scalding/src/main')
-rw-r--r-- | scalding/src/main/scala/sandcrawler/DumpFileMetaJob.scala | 36 |
1 file changed, 36 insertions, 0 deletions
package sandcrawler

import java.util.Properties

import cascading.property.AppProps
import cascading.tuple.Fields
import com.twitter.scalding._
import com.twitter.scalding.typed.TDsl._
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.util.Bytes
import parallelai.spyglass.base.JobBase
import parallelai.spyglass.hbase.HBaseConstants.SourceMode
import parallelai.spyglass.hbase.HBasePipeConversions
import parallelai.spyglass.hbase.HBaseSource

// Dumps all the info needed to insert a file entity in Fatcat. Useful for
// joining.
class DumpFileMetaJob(args: Args) extends JobBase(args) with HBasePipeConversions {

  // Full-table scan over the file-metadata columns, keeping only rows where
  // all three columns are present, decoded into plain Scala values:
  // (row key, CDX line, MIME type, file size in bytes).
  val metaPipe : TypedPipe[(String, String, String, Long)] =
    HBaseBuilder.build(args("hbase-table"),
                       args("zookeeper-hosts"),
                       List("file:cdx", "file:mime", "file:size"),
                       SourceMode.SCAN_ALL)
      .read
      .toTypedPipe[(ImmutableBytesWritable,ImmutableBytesWritable,ImmutableBytesWritable,ImmutableBytesWritable)](new Fields("key", "cdx", "mime", "size"))
      // A null cell means the column was absent for that row; drop such rows
      // so the decode step below never dereferences a null.
      .filter { case (_, cdxBytes, mimeBytes, sizeBytes) =>
        cdxBytes != null && mimeBytes != null && sizeBytes != null
      }
      .map { case (keyBytes, cdxBytes, mimeBytes, sizeBytes) =>
        (Bytes.toString(keyBytes.copyBytes()),
         Bytes.toString(cdxBytes.copyBytes()),
         Bytes.toString(mimeBytes.copyBytes()),
         Bytes.toLong(sizeBytes.copyBytes()))
      }

  // Emit as tab-separated values at the caller-supplied output path.
  metaPipe.write(TypedTsv[(String,String,String,Long)](args("output")))

}