From b4c7b7fdc2537f4defb742e2b7b9de4524adf174 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Fri, 24 Aug 2018 18:07:11 -0700 Subject: rename DumpUnGrobidedJob --- .../main/scala/sandcrawler/DumpUnGrobidedJob.scala | 67 ++++++++++++++++++++ .../main/scala/sandcrawler/UnGrobidedDumpJob.scala | 67 -------------------- .../scala/sandcrawler/DumpUnGrobidedJobTest.scala | 72 ++++++++++++++++++++++ .../scala/sandcrawler/UnGrobidedDumpJobTest.scala | 72 ---------------------- 4 files changed, 139 insertions(+), 139 deletions(-) create mode 100644 scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala delete mode 100644 scalding/src/main/scala/sandcrawler/UnGrobidedDumpJob.scala create mode 100644 scalding/src/test/scala/sandcrawler/DumpUnGrobidedJobTest.scala delete mode 100644 scalding/src/test/scala/sandcrawler/UnGrobidedDumpJobTest.scala (limited to 'scalding') diff --git a/scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala b/scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala new file mode 100644 index 0000000..7fd3ce0 --- /dev/null +++ b/scalding/src/main/scala/sandcrawler/DumpUnGrobidedJob.scala @@ -0,0 +1,67 @@ +package sandcrawler + +import java.util.Properties + +import cascading.property.AppProps +import cascading.tuple.Fields +import com.twitter.scalding._ +import com.twitter.scalding.typed.TDsl._ +import parallelai.spyglass.base.JobBase +import parallelai.spyglass.hbase.HBaseConstants.SourceMode +import parallelai.spyglass.hbase.HBasePipeConversions +import parallelai.spyglass.hbase.HBaseSource + +// Filters for HBase rows which have not had GROBID run on them, but do have +// full CDX metadata, and dumps to a TSV for later extraction by the +// "extraction-ungrobided" job. +// +// Does the same horrible join thing that DumpUnGrobidedJob does. 
+class DumpUnGrobidedJob(args: Args) extends JobBase(args) with HBasePipeConversions { + + val output = args("output") + + val allKeys : TypedPipe[(String,String,String,String)] = DumpUnGrobidedJob.getHBaseKeySource( + args("hbase-table"), + args("zookeeper-hosts")) + .read + .fromBytesWritable('key, 'c, 'mime, 'cdx) + .toTypedPipe[(String,String,String,String)]('key, 'c, 'mime, 'cdx) + + val existingKeys : TypedPipe[(String,Boolean)] = DumpUnGrobidedJob.getHBaseColSource( + args("hbase-table"), + args("zookeeper-hosts")) + .read + .fromBytesWritable('key) + .toTypedPipe[String]('key) + .map{ key => (key, true) } + + val missingKeys : TypedPipe[(String,String,String,String)] = allKeys + .groupBy(_._1) + .leftJoin(existingKeys.groupBy(_._1)) + .toTypedPipe + .collect { case (key, ((_, c, mime, cdx), None)) => (key, c, mime, cdx) } + + missingKeys + .write(TypedTsv[(String,String,String,String)](output)) + +} + +object DumpUnGrobidedJob { + + // eg, "wbgrp-journal-extract-0-qa", "mtrcs-zk1.us.archive.org:2181" + def getHBaseColSource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = { + HBaseBuilder.build( + hbaseTable, + zookeeperHosts, + List("grobid0:status_code"), + SourceMode.SCAN_ALL) + } + + def getHBaseKeySource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = { + HBaseBuilder.build( + hbaseTable, + zookeeperHosts, + List("f:c", "file:mime", "file:cdx"), + SourceMode.SCAN_ALL) + } +} diff --git a/scalding/src/main/scala/sandcrawler/UnGrobidedDumpJob.scala b/scalding/src/main/scala/sandcrawler/UnGrobidedDumpJob.scala deleted file mode 100644 index 0ce9167..0000000 --- a/scalding/src/main/scala/sandcrawler/UnGrobidedDumpJob.scala +++ /dev/null @@ -1,67 +0,0 @@ -package sandcrawler - -import java.util.Properties - -import cascading.property.AppProps -import cascading.tuple.Fields -import com.twitter.scalding._ -import com.twitter.scalding.typed.TDsl._ -import parallelai.spyglass.base.JobBase -import 
parallelai.spyglass.hbase.HBaseConstants.SourceMode -import parallelai.spyglass.hbase.HBasePipeConversions -import parallelai.spyglass.hbase.HBaseSource - -// Filters for HBase rows which have not had GROBID run on them, but do have -// full CDX metadata, and dumps to a TSV for later extraction by the -// "extraction-ungrobided" job. -// -// Does the same horrible join thing that UnGrobidedDumpJob does. -class UnGrobidedDumpJob(args: Args) extends JobBase(args) with HBasePipeConversions { - - val output = args("output") - - val allKeys : TypedPipe[(String,String,String,String)] = UnGrobidedDumpJob.getHBaseKeySource( - args("hbase-table"), - args("zookeeper-hosts")) - .read - .fromBytesWritable('key, 'c, 'mime, 'cdx) - .toTypedPipe[(String,String,String,String)]('key, 'c, 'mime, 'cdx) - - val existingKeys : TypedPipe[(String,Boolean)] = UnGrobidedDumpJob.getHBaseColSource( - args("hbase-table"), - args("zookeeper-hosts")) - .read - .fromBytesWritable('key) - .toTypedPipe[String]('key) - .map{ key => (key, true) } - - val missingKeys : TypedPipe[(String,String,String,String)] = allKeys - .groupBy(_._1) - .leftJoin(existingKeys.groupBy(_._1)) - .toTypedPipe - .collect { case (key, ((_, c, mime, cdx), None)) => (key, c, mime, cdx) } - - missingKeys - .write(TypedTsv[(String,String,String,String)](output)) - -} - -object UnGrobidedDumpJob { - - // eg, "wbgrp-journal-extract-0-qa",7 "mtrcs-zk1.us.archive.org:2181" - def getHBaseColSource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = { - HBaseBuilder.build( - hbaseTable, - zookeeperHosts, - List("grobid0:status_code"), - SourceMode.SCAN_ALL) - } - - def getHBaseKeySource(hbaseTable: String, zookeeperHosts: String) : HBaseSource = { - HBaseBuilder.build( - hbaseTable, - zookeeperHosts, - List("f:c", "file:mime", "file:cdx"), - SourceMode.SCAN_ALL) - } -} diff --git a/scalding/src/test/scala/sandcrawler/DumpUnGrobidedJobTest.scala b/scalding/src/test/scala/sandcrawler/DumpUnGrobidedJobTest.scala new file mode 
100644 index 0000000..8dda5c8 --- /dev/null +++ b/scalding/src/test/scala/sandcrawler/DumpUnGrobidedJobTest.scala @@ -0,0 +1,72 @@ +package sandcrawler + +import cascading.tuple.Fields +import cascading.tuple.Tuple +import com.twitter.scalding.JobTest +import com.twitter.scalding.Tsv +import com.twitter.scalding.TupleConversions +import com.twitter.scalding.TypedTsv +import org.apache.hadoop.hbase.io.ImmutableBytesWritable +import org.apache.hadoop.hbase.util.Bytes +import org.junit.runner.RunWith +import org.scalatest.FunSpec +import org.scalatest.junit.JUnitRunner +import org.slf4j.LoggerFactory +import parallelai.spyglass.hbase.HBaseConstants.SourceMode +import parallelai.spyglass.hbase.HBaseSource +import scala._ + +@RunWith(classOf[JUnitRunner]) +class DumpUnGrobidedJobTest extends FunSpec with TupleConversions { + + val output = "/tmp/testOutput" + val (testTable, testHost) = ("test-table", "dummy-host:2181") + + val log = LoggerFactory.getLogger(this.getClass.getName) + + val statusCode: Long = 200 + val statusBytes = Bytes.toBytes(statusCode) + + val sampleDataGrobid : List[List[Array[Byte]]] = List( + ("sha1:SDKUVHC3YNNEGH5WAG5ZAAXWAEBNX4WT", statusBytes), + ("sha1:35985C3YNNEGH5WAG5ZAAXWAEBNXJW56", statusBytes), + ("sha1:885C3YNNEGH5WAG5ZAAXWA8BNXJWT6CZ", statusBytes), + ("sha1:00904C3YNNEGH5WAG5ZA9XWAEBNXJWT6", statusBytes), + ("sha1:249C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ", statusBytes), + ("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", statusBytes)) + .map(pair => List(Bytes.toBytes(pair._1), pair._2)) + + val sampleDataFile : List[List[Array[Byte]]] = List( + ("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), + ("sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), + ("sha1:SDKUVHC3YNNEGH5WAG5ZAAXWAEBNX4WT", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), + ("sha1:35985C3YNNEGH5WAG5ZAAXWAEBNXJW56", """{c-json-data}""", "application/pdf", 
"""{cdx-json-data}"""), + ("sha1:885C3YNNEGH5WAG5ZAAXWA8BNXJWT6CZ", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), + ("sha1:00904C3YNNEGH5WAG5ZA9XWAEBNXJWT6", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), + ("sha1:249C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), + ("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", """{c-json-data}""", "application/pdf", """{cdx-json-data}""")) + .map(pair => List(Bytes.toBytes(pair._1), + Bytes.toBytes(pair._2), + Bytes.toBytes(pair._3), + Bytes.toBytes(pair._4))) + + JobTest("sandcrawler.DumpUnGrobidedJob") + .arg("test", "") + .arg("app.conf.path", "app.conf") + .arg("output", output) + .arg("hbase-table", testTable) + .arg("zookeeper-hosts", testHost) + .arg("debug", "true") + .source[Tuple](DumpUnGrobidedJob.getHBaseColSource(testTable, testHost), + sampleDataGrobid.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(s)}):_*))) + .source[Tuple](DumpUnGrobidedJob.getHBaseKeySource(testTable, testHost), + sampleDataFile.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(s)}):_*))) + .sink[Tuple](TypedTsv[(String,String,String,String)](output)) { + outputBuffer => + it("should return correct-length list.") { + assert(outputBuffer.size === 2) + } + } + .run + .finish +} diff --git a/scalding/src/test/scala/sandcrawler/UnGrobidedDumpJobTest.scala b/scalding/src/test/scala/sandcrawler/UnGrobidedDumpJobTest.scala deleted file mode 100644 index a847ebb..0000000 --- a/scalding/src/test/scala/sandcrawler/UnGrobidedDumpJobTest.scala +++ /dev/null @@ -1,72 +0,0 @@ -package sandcrawler - -import cascading.tuple.Fields -import cascading.tuple.Tuple -import com.twitter.scalding.JobTest -import com.twitter.scalding.Tsv -import com.twitter.scalding.TupleConversions -import com.twitter.scalding.TypedTsv -import org.apache.hadoop.hbase.io.ImmutableBytesWritable -import org.apache.hadoop.hbase.util.Bytes -import org.junit.runner.RunWith -import 
org.scalatest.FunSpec -import org.scalatest.junit.JUnitRunner -import org.slf4j.LoggerFactory -import parallelai.spyglass.hbase.HBaseConstants.SourceMode -import parallelai.spyglass.hbase.HBaseSource -import scala._ - -@RunWith(classOf[JUnitRunner]) -class UnGrobidedDumpJobTest extends FunSpec with TupleConversions { - - val output = "/tmp/testOutput" - val (testTable, testHost) = ("test-table", "dummy-host:2181") - - val log = LoggerFactory.getLogger(this.getClass.getName) - - val statusCode: Long = 200 - val statusBytes = Bytes.toBytes(statusCode) - - val sampleDataGrobid : List[List[Array[Byte]]] = List( - ("sha1:SDKUVHC3YNNEGH5WAG5ZAAXWAEBNX4WT", statusBytes), - ("sha1:35985C3YNNEGH5WAG5ZAAXWAEBNXJW56", statusBytes), - ("sha1:885C3YNNEGH5WAG5ZAAXWA8BNXJWT6CZ", statusBytes), - ("sha1:00904C3YNNEGH5WAG5ZA9XWAEBNXJWT6", statusBytes), - ("sha1:249C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ", statusBytes), - ("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", statusBytes)) - .map(pair => List(Bytes.toBytes(pair._1), pair._2)) - - val sampleDataFile : List[List[Array[Byte]]] = List( - ("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:SDKUVHC3YNNEGH5WAG5ZAAXWAEBNX4WT", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:35985C3YNNEGH5WAG5ZAAXWAEBNXJW56", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:885C3YNNEGH5WAG5ZAAXWA8BNXJWT6CZ", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:00904C3YNNEGH5WAG5ZA9XWAEBNXJWT6", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:249C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ", """{c-json-data}""", "application/pdf", """{cdx-json-data}"""), - ("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", """{c-json-data}""", "application/pdf", """{cdx-json-data}""")) - .map(pair => List(Bytes.toBytes(pair._1), - 
Bytes.toBytes(pair._2), - Bytes.toBytes(pair._3), - Bytes.toBytes(pair._4))) - - JobTest("sandcrawler.UnGrobidedDumpJob") - .arg("test", "") - .arg("app.conf.path", "app.conf") - .arg("output", output) - .arg("hbase-table", testTable) - .arg("zookeeper-hosts", testHost) - .arg("debug", "true") - .source[Tuple](UnGrobidedDumpJob.getHBaseColSource(testTable, testHost), - sampleDataGrobid.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(s)}):_*))) - .source[Tuple](UnGrobidedDumpJob.getHBaseKeySource(testTable, testHost), - sampleDataFile.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(s)}):_*))) - .sink[Tuple](TypedTsv[(String,String,String,String)](output)) { - outputBuffer => - it("should return correct-length list.") { - assert(outputBuffer.size === 2) - } - } - .run - .finish -} -- cgit v1.2.3