aboutsummaryrefslogtreecommitdiffstats
path: root/scalding
diff options
context:
space:
mode:
authorBryan Newbold <bnewbold@archive.org>2018-08-20 18:56:38 -0700
committerBryan Newbold <bnewbold@archive.org>2018-08-21 21:25:56 -0700
commitcce6047bdcf979729ec286d4c1f37676aeff2707 (patch)
treef6774d360ad1767ddd1afc11e788b52b699eb75c /scalding
parent9d9ff332fbbaf5194047bbd056d624a5e999b8c7 (diff)
downloadsandcrawler-cce6047bdcf979729ec286d4c1f37676aeff2707.tar.gz
sandcrawler-cce6047bdcf979729ec286d4c1f37676aeff2707.zip
fix bugs/typos in HBaseColCountJob and HBaseStatusCountJob
Diffstat (limited to 'scalding')
-rw-r--r--scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala2
-rw-r--r--scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala6
-rw-r--r--scalding/src/test/scala/sandcrawler/HBaseStatusCodeCountTest.scala6
-rw-r--r--scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala15
4 files changed, 11 insertions, 18 deletions
diff --git a/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala
index a007339..20cc7a1 100644
--- a/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala
+++ b/scalding/src/main/scala/sandcrawler/HBaseColCountJob.scala
@@ -17,7 +17,7 @@ class HBaseColCountJob(args: Args) extends JobBase(args) with HBasePipeConversio
HBaseColCountJob.getHBaseSource(
args("hbase-table"),
args("zookeeper-hosts"),
- args("column")
+ args("column"))
.read
.debug
.groupAll { _.size('count) }
diff --git a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
index 1635e03..f79d672 100644
--- a/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
+++ b/scalding/src/main/scala/sandcrawler/HBaseStatusCountJob.scala
@@ -20,13 +20,13 @@ class HBaseStatusCountJob(args: Args) extends JobBase(args) with HBasePipeConver
args("zookeeper-hosts"),
"grobid0:status")
- val statusPipe : TypedPipe[Long] = source
+ val statusPipe : TypedPipe[String] = source
.read
.toTypedPipe[(ImmutableBytesWritable,ImmutableBytesWritable)]('key, 'status)
- .map { case (key, raw_status) => Bytes.toString(raw_code.copyBytes()) }
+ .map { case (key, raw_status) => Bytes.toString(raw_status.copyBytes()) }
statusPipe.groupBy { identity }
.size
.debug
- .write(TypedTsv[(Long,String)](args("output")))
+ .write(TypedTsv[(String,Long)](args("output")))
}
diff --git a/scalding/src/test/scala/sandcrawler/HBaseStatusCodeCountTest.scala b/scalding/src/test/scala/sandcrawler/HBaseStatusCodeCountTest.scala
index 3291670..d2cf9de 100644
--- a/scalding/src/test/scala/sandcrawler/HBaseStatusCodeCountTest.scala
+++ b/scalding/src/test/scala/sandcrawler/HBaseStatusCodeCountTest.scala
@@ -17,7 +17,7 @@ import parallelai.spyglass.hbase.HBaseSource
import scala._
@RunWith(classOf[JUnitRunner])
-class HBaseStatusCountTest extends FunSpec with TupleConversions {
+class HBaseStatusCodeCountTest extends FunSpec with TupleConversions {
val output = "/tmp/testOutput"
val (testTable, testHost) = ("test-table", "dummy-host:2181")
@@ -44,7 +44,7 @@ class HBaseStatusCountTest extends FunSpec with TupleConversions {
val statusType1Count = sampleData.count(lst => lst(1) == statusType1Bytes)
val statusType2Count = sampleData.count(lst => lst(1) == statusType2Bytes)
- JobTest("sandcrawler.HBaseStatusCountJob")
+ JobTest("sandcrawler.HBaseStatusCodeCountJob")
.arg("test", "")
.arg("app.conf.path", "app.conf")
.arg("output", output)
@@ -55,7 +55,7 @@ class HBaseStatusCountTest extends FunSpec with TupleConversions {
sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(s)}):_*)))
.sink[Tuple](TypedTsv[(Long, Long)](output)) {
outputBuffer =>
- it("should return a 2-element list.") {
+ it("should return a correct number of elements.") {
assert(outputBuffer.size === 2)
}
diff --git a/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala b/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala
index b53abeb..7e91af3 100644
--- a/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala
+++ b/scalding/src/test/scala/sandcrawler/HBaseStatusCountTest.scala
@@ -17,7 +17,7 @@ import parallelai.spyglass.hbase.HBaseSource
import scala._
@RunWith(classOf[JUnitRunner])
-class HBaseStatusCodeCountTest extends FunSpec with TupleConversions {
+class HBaseStatusCountTest extends FunSpec with TupleConversions {
val output = "/tmp/testOutput"
val (testTable, testHost) = ("test-table", "dummy-host:2181")
@@ -42,27 +42,20 @@ class HBaseStatusCodeCountTest extends FunSpec with TupleConversions {
val statusType1Count = sampleData.count(lst => lst(1) == statusType1Bytes)
val statusType2Count = sampleData.count(lst => lst(1) == statusType2Bytes)
- JobTest("sandcrawler.HBaseStatusCodeCountJob")
+ JobTest("sandcrawler.HBaseStatusCountJob")
.arg("test", "")
.arg("app.conf.path", "app.conf")
.arg("output", output)
.arg("hbase-table", testTable)
.arg("zookeeper-hosts", testHost)
.arg("debug", "true")
- .source[Tuple](HBaseCountJob.getHBaseSource(testTable, testHost, "grobid0:status_code"),
+ .source[Tuple](HBaseCountJob.getHBaseSource(testTable, testHost, "grobid0:status"),
sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(s)}):_*)))
- .sink[Tuple](TypedTsv[(Long, Long)](output)) {
+ .sink[Tuple](TypedTsv[(String, Long)](output)) {
outputBuffer =>
it("should return a 2-element list.") {
assert(outputBuffer.size === 2)
}
-
- // Convert List[Tuple] to Map[Long, Long].
- val counts = outputBuffer.map(t => (t.getLong(0), t.getLong(1))).toMap
- it("should have the appropriate number of each status type") {
- assert(counts(statusType1) == statusType1Count)
- assert(counts(statusType2) == statusType2Count)
- }
}
.run
.finish