From cc54dea9c77f8386bafd71977b2be1b3b21f0aae Mon Sep 17 00:00:00 2001
From: Ellen Spertus
Date: Wed, 6 Jun 2018 09:40:04 -0700
Subject: Added job and test for counting mime types.

---
 .../main/scala/sandcrawler/HBaseMimeCountJob.scala | 28 +++++++++
 .../scala/sandcrawler/HBaseMimeCountTest.scala     | 68 ++++++++++++++++++++++
 2 files changed, 96 insertions(+)
 create mode 100644 scalding/src/main/scala/sandcrawler/HBaseMimeCountJob.scala
 create mode 100644 scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala

(limited to 'scalding/src')

diff --git a/scalding/src/main/scala/sandcrawler/HBaseMimeCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseMimeCountJob.scala
new file mode 100644
index 0000000..819a652
--- /dev/null
+++ b/scalding/src/main/scala/sandcrawler/HBaseMimeCountJob.scala
@@ -0,0 +1,28 @@
+package sandcrawler
+
+import cascading.property.AppProps
+import cascading.tuple.Fields
+import com.twitter.scalding._
+import java.util.Properties
+import parallelai.spyglass.base.JobBase
+import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions}
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+
+class HBaseMimeCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {
+  val output = args("output")
+
+  HBaseMimeCountJob.getHBaseSource
+    .read
+    .fromBytesWritable(List('mime))
+    .debug
+    .groupBy('mime){group => group.size('count)}
+    .write(Tsv(output))
+}
+
+object HBaseMimeCountJob {
+  def getHBaseSource = HBaseBuilder.build(
+    "wbgrp-journal-extract-0-qa",      // HBase Table Name
+    "mtrcs-zk1.us.archive.org:2181",   // HBase Zookeeper server (to get runtime config info; can be array?)
+    List("file:mime"),
+    SourceMode.SCAN_ALL)
+}
diff --git a/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala b/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
new file mode 100644
index 0000000..000420b
--- /dev/null
+++ b/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
@@ -0,0 +1,68 @@
+package sandcrawler
+
+import cascading.tuple.{Tuple, Fields}
+import com.twitter.scalding.{JobTest, Tsv, TupleConversions}
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable
+import org.apache.hadoop.hbase.util.Bytes
+import org.junit.runner.RunWith
+import org.scalatest.FunSpec
+import org.scalatest.junit.JUnitRunner
+import org.slf4j.LoggerFactory
+import parallelai.spyglass.hbase.HBaseSource
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+import scala._
+
+/**
+ * Example of how to define tests for HBaseSource
+ */
+@RunWith(classOf[JUnitRunner])
+class HBaseMimeCountTest extends FunSpec with TupleConversions {
+
+  val output = "/tmp/testOutput"
+
+  val log = LoggerFactory.getLogger(this.getClass.getName)
+
+  val mimeType1 = "text/html"
+  val mimeType2 = "application/pdf"
+
+  val sampleData = List(
+    List("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", mimeType1),
+    List("sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU", mimeType1),
+    List("sha1:SDKUVHC3YNNEGH5WAG5ZAAXWAEBNX4WT", mimeType2),
+    List("sha1:35985C3YNNEGH5WAG5ZAAXWAEBNXJW56", mimeType2),
+    List("sha1:885C3YNNEGH5WAG5ZAAXWA8BNXJWT6CZ", mimeType2),
+    List("sha1:00904C3YNNEGH5WAG5ZA9XWAEBNXJWT6", mimeType2),
+    List("sha1:249C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ", mimeType1),
+    List("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", mimeType2)
+  )
+
+  val mimeType1Count = 3
+  val mimeType2Count = 5
+
+  JobTest("sandcrawler.HBaseMimeCountJob")
+    .arg("test", "")
+    .arg("app.conf.path", "app.conf")
+    .arg("output", output)
+    .arg("debug", "true")
+    .source[Tuple](HBaseMimeCountJob.getHBaseSource,
+      sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(Bytes.toBytes(s))}):_*)))
+    .sink[Tuple](Tsv(output)) {
+      outputBuffer =>
+        it("should return a 2-element list.") {
+          println("outputBuffer.size => " + outputBuffer.size)
+          println("outputBuffer(0) => " + outputBuffer(0))
+          println("outputBuffer(1) => " + outputBuffer(1))
+          assert(outputBuffer.size === 2)
+        }
+
+        val counts = outputBuffer.map(t => (t.getString(0), t.getInteger(1))).toMap
+
+        it("should have the appropriate number of each mime type") {
+          assert(counts(mimeType1) == mimeType1Count)
+          assert(counts(mimeType2) == mimeType2Count)
+        }
+    }
+    .run
+    .finish
+
+}
-- cgit v1.2.3

From 1d3328eead103f36d44e9dfabbf05074edc684cb Mon Sep 17 00:00:00 2001
From: Ellen Spertus
Date: Wed, 6 Jun 2018 09:43:10 -0700
Subject: Removed copied comment.

---
 scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

(limited to 'scalding/src')

diff --git a/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala b/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
index 000420b..439b4f4 100644
--- a/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
+++ b/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
@@ -12,9 +12,6 @@ import parallelai.spyglass.hbase.HBaseSource
 import parallelai.spyglass.hbase.HBaseConstants.SourceMode
 import scala._
 
-/**
- * Example of how to define tests for HBaseSource
- */
 @RunWith(classOf[JUnitRunner])
 class HBaseMimeCountTest extends FunSpec with TupleConversions {
 
@@ -49,14 +46,11 @@ class HBaseMimeCountTest extends FunSpec with TupleConversions {
     .sink[Tuple](Tsv(output)) {
       outputBuffer =>
         it("should return a 2-element list.") {
-          println("outputBuffer.size => " + outputBuffer.size)
-          println("outputBuffer(0) => " + outputBuffer(0))
-          println("outputBuffer(1) => " + outputBuffer(1))
           assert(outputBuffer.size === 2)
         }
 
+        // Convert List[Tuple] to Map[String, Integer].
         val counts = outputBuffer.map(t => (t.getString(0), t.getInteger(1))).toMap
-
         it("should have the appropriate number of each mime type") {
           assert(counts(mimeType1) == mimeType1Count)
           assert(counts(mimeType2) == mimeType2Count)
@@ -64,5 +58,4 @@ class HBaseMimeCountTest extends FunSpec with TupleConversions {
     }
     .run
     .finish
-
 }
-- cgit v1.2.3

From f81e193b4a2c7f61a46d1ead1bdf6b746997b3a8 Mon Sep 17 00:00:00 2001
From: Ellen Spertus
Date: Wed, 6 Jun 2018 09:50:38 -0700
Subject: Made test data more robust.

---
 scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'scalding/src')

diff --git a/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala b/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
index 439b4f4..eb6f4ff 100644
--- a/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
+++ b/scalding/src/test/scala/sandcrawler/HBaseMimeCountTest.scala
@@ -33,8 +33,8 @@ class HBaseMimeCountTest extends FunSpec with TupleConversions {
     List("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", mimeType2)
   )
 
-  val mimeType1Count = 3
-  val mimeType2Count = 5
+  val mimeType1Count = sampleData.count(lst => lst(1) == mimeType1)
+  val mimeType2Count = sampleData.count(lst => lst(1) == mimeType2)
 
   JobTest("sandcrawler.HBaseMimeCountJob")
     .arg("test", "")
-- cgit v1.2.3

From 6eca6290aa3fc829f4767023ae075350a0a78192 Mon Sep 17 00:00:00 2001
From: Ellen Spertus
Date: Thu, 7 Jun 2018 12:52:08 -0700
Subject: Added status count.
---
 .../scala/sandcrawler/HBaseStatusCountJob.scala    | 28 ++++++++++
 .../scala/sandcrawler/HBaseStatusCountTest.scala   | 61 ++++++++++++++++++++++
 2 files changed, 89 insertions(+)
 create mode 100644 scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
 create mode 100644 scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala

(limited to 'scalding/src')

diff --git a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
new file mode 100644
index 0000000..0675efc
--- /dev/null
+++ b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
@@ -0,0 +1,28 @@
+package sandcrawler
+
+import cascading.property.AppProps
+import cascading.tuple.Fields
+import com.twitter.scalding._
+import java.util.Properties
+import parallelai.spyglass.base.JobBase
+import parallelai.spyglass.hbase.{HBaseSource, HBasePipeConversions}
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+
+class HBaseStatusCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {
+  val output = args("output")
+
+  HBaseStatusCountJob.getHBaseSource
+    .read
+    .fromBytesWritable(List('status))
+//    .debug
+    .groupBy('status){group => group.size('count)}
+    .write(Tsv(output))
+}
+
+object HBaseStatusCountJob {
+  def getHBaseSource = HBaseBuilder.build(
+    "wbgrp-journal-extract-0-qa",      // HBase Table Name
+    "mtrcs-zk1.us.archive.org:2181",   // HBase Zookeeper server (to get runtime config info; can be array?)
+    List("grobid0:status"),
+    SourceMode.SCAN_ALL)
+}
diff --git a/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala b/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala
new file mode 100644
index 0000000..8b5c3d6
--- /dev/null
+++ b/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala
@@ -0,0 +1,61 @@
+package sandcrawler
+
+import cascading.tuple.{Tuple, Fields}
+import com.twitter.scalding.{JobTest, Tsv, TupleConversions}
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable
+import org.apache.hadoop.hbase.util.Bytes
+import org.junit.runner.RunWith
+import org.scalatest.FunSpec
+import org.scalatest.junit.JUnitRunner
+import org.slf4j.LoggerFactory
+import parallelai.spyglass.hbase.HBaseSource
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+import scala._
+
+@RunWith(classOf[JUnitRunner])
+class HBaseStatusCountTest extends FunSpec with TupleConversions {
+
+  val output = "/tmp/testOutput"
+
+  val log = LoggerFactory.getLogger(this.getClass.getName)
+
+  val statusType1 = "200"
+  val statusType2 = "404"
+
+  val sampleData = List(
+    List("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", statusType1),
+    List("sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU", statusType1),
+    List("sha1:SDKUVHC3YNNEGH5WAG5ZAAXWAEBNX4WT", statusType2),
+    List("sha1:35985C3YNNEGH5WAG5ZAAXWAEBNXJW56", statusType2),
+    List("sha1:885C3YNNEGH5WAG5ZAAXWA8BNXJWT6CZ", statusType2),
+    List("sha1:00904C3YNNEGH5WAG5ZA9XWAEBNXJWT6", statusType2),
+    List("sha1:249C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ", statusType1),
+    List("sha1:095893C3YNNEGH5WAG5ZAAXWAEBNXJWT", statusType2)
+  )
+
+  val statusType1Count = sampleData.count(lst => lst(1) == statusType1)
+  val statusType2Count = sampleData.count(lst => lst(1) == statusType2)
+
+  JobTest("sandcrawler.HBaseStatusCountJob")
+    .arg("test", "")
+    .arg("app.conf.path", "app.conf")
+    .arg("output", output)
+    .arg("debug", "true")
+    .source[Tuple](HBaseStatusCountJob.getHBaseSource,
+      sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(Bytes.toBytes(s))}):_*)))
+    .sink[Tuple](Tsv(output)) {
+      outputBuffer =>
+        it("should return a 2-element list.") {
+          assert(outputBuffer.size === 2)
+        }
+
+        // Convert List[Tuple] to Map[String, Integer].
+        val counts = outputBuffer.map(t => (t.getString(0), t.getInteger(1))).toMap
+        it("should have the appropriate number of each status type") {
+          assert(counts(statusType1) == statusType1Count)
+          assert(counts(statusType2) == statusType2Count)
+        }
+    }
+    .run
+    .finish
+}
-- cgit v1.2.3
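
A note on the two jobs added in this series: HBaseMimeCountJob and HBaseStatusCountJob are identical except for the HBase column they scan ("file:mime" vs. "grobid0:status") and the field they group on. A possible follow-up refactor, sketched below, would pull the shared pipeline into one parameterized base job. This is only a sketch built from the code in the patches above; it assumes the same HBaseBuilder helper and SpyGlass conversions are available, and the names HBaseColCountJob, column, and field are illustrative rather than existing code.

package sandcrawler

import com.twitter.scalding._
import parallelai.spyglass.base.JobBase
import parallelai.spyglass.hbase.HBasePipeConversions
import parallelai.spyglass.hbase.HBaseConstants.SourceMode

// Hypothetical base job: scan one HBase column and count occurrences of each value.
abstract class HBaseColCountJob(args: Args) extends JobBase(args) with HBasePipeConversions {
  // Subclasses supply the column ("family:qualifier") and the field name it maps to.
  def column: String
  def field: Symbol

  val output = args("output")

  HBaseBuilder.build(
    "wbgrp-journal-extract-0-qa",      // HBase table name
    "mtrcs-zk1.us.archive.org:2181",   // Zookeeper quorum
    List(column),
    SourceMode.SCAN_ALL)
    .read
    .fromBytesWritable(List(field))
    .groupBy(field) { group => group.size('count) }
    .write(Tsv(output))
}

// The mime-count job would then reduce to naming its column and field.
class MimeColCountJob(args: Args) extends HBaseColCountJob(args) {
  def column = "file:mime"
  def field = 'mime
}

Declaring column and field as defs (rather than vals) in the subclass avoids initialization-order surprises, since the base-class constructor assembles the pipeline before subclass vals would be initialized.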