author     Bryan Newbold <bnewbold@archive.org>    2018-05-23 12:27:59 -0700
committer  Bryan Newbold <bnewbold@archive.org>    2018-05-24 00:02:36 -0700
commit     4ba428db30593b67283dd90b92141f99840dc78e (patch)
tree       f63c8e146e7f90a530abfebdb993ab45d57426d5 /scald-mvp/src/test
parent     29e4a83ff76da07bc6ad5d3f49d746ee0bc72023 (diff)
download   sandcrawler-4ba428db30593b67283dd90b92141f99840dc78e.tar.gz
           sandcrawler-4ba428db30593b67283dd90b92141f99840dc78e.zip
rename jvm/scalding directories
Diffstat (limited to 'scald-mvp/src/test')
3 files changed, 0 insertions, 153 deletions
diff --git a/scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala b/scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala
deleted file mode 100644
index cf068c1..0000000
--- a/scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package example
-
-import org.junit.runner.RunWith
-import com.twitter.scalding.{JobTest, TupleConversions}
-import org.scalatest.FunSpec
-import org.scalatest.junit.JUnitRunner
-import org.slf4j.LoggerFactory
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable
-import cascading.tuple.{Tuple, Fields}
-import org.apache.hadoop.hbase.util.Bytes
-import scala._
-import com.twitter.scalding.Tsv
-import parallelai.spyglass.hbase.HBaseSource
-import parallelai.spyglass.hbase.HBaseConstants.SourceMode
-
-/**
- * Example of how to define tests for HBaseSource
- */
-@RunWith(classOf[JUnitRunner])
-class SimpleHBaseSourceExampleTest extends FunSpec with TupleConversions {
-
-  val output = "/tmp/testOutput"
-
-  val log = LoggerFactory.getLogger(this.getClass.getName)
-
-  val sampleData = List(
-    List("1", "kk1", "pp1"),
-    List("2", "kk2", "pp2"),
-    List("3", "kk3", "pp3")
-  )
-
-  JobTest("example.SimpleHBaseSourceExample")
-    .arg("test", "")
-    .arg("app.conf.path", "app.conf")
-    .arg("output", output)
-    .arg("debug", "true")
-    .source[Tuple](
-      new HBaseSource(
-        "table_name",
-        "mtrcs-zk1.us.archive.org:2181",
-        new Fields("key"),
-        List("column_family"),
-        List(new Fields("column_name1", "column_name2")),
-        sourceMode = SourceMode.GET_LIST, keyList = List("1", "2", "3")),
-      sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(Bytes.toBytes(s))}):_*)))
-    .sink[Tuple](Tsv(output format "get_list")) {
-      outputBuffer =>
-        log.debug("Output => " + outputBuffer)
-
-        it("should return the test data provided.") {
-          println("outputBuffer.size => " + outputBuffer.size)
-          assert(outputBuffer.size === 3)
-        }
-    }
-    .run
-    .finish
-
-}
diff --git a/scald-mvp/src/test/scala/example/WordCountTest.scala b/scald-mvp/src/test/scala/example/WordCountTest.scala
deleted file mode 100644
index c42770f..0000000
--- a/scald-mvp/src/test/scala/example/WordCountTest.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
-Copyright 2012 Twitter, Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-package com.twitter.scalding
-
-import org.scalatest.{ Matchers, WordSpec }
-
-class WordCountTest extends WordSpec with Matchers {
-  "A WordCount job" should {
-    JobTest(new example.WordCountJob(_))
-      .arg("input", "inputFile")
-      .arg("output", "outputFile")
-      .source(TextLine("inputFile"), List((0, "hack hack hack and hack")))
-      .sink[(String, Int)](TypedTsv[(String, Long)]("outputFile")){ outputBuffer =>
-        val outMap = outputBuffer.toMap
-        "count words correctly" in {
-          outMap("hack") shouldBe 4
-          outMap("and") shouldBe 1
-        }
-      }
-      .run
-      .finish()
-  }
-}
diff --git a/scald-mvp/src/test/scala/sandcrawler/HBaseRowCountTest.scala b/scald-mvp/src/test/scala/sandcrawler/HBaseRowCountTest.scala
deleted file mode 100644
index 598f45d..0000000
--- a/scald-mvp/src/test/scala/sandcrawler/HBaseRowCountTest.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-package example
-
-import org.junit.runner.RunWith
-import com.twitter.scalding.{JobTest, TupleConversions}
-import org.scalatest.FunSpec
-import org.scalatest.junit.JUnitRunner
-import org.slf4j.LoggerFactory
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable
-import cascading.tuple.{Tuple, Fields}
-import org.apache.hadoop.hbase.util.Bytes
-import scala._
-import com.twitter.scalding.Tsv
-import parallelai.spyglass.hbase.HBaseSource
-import parallelai.spyglass.hbase.HBaseConstants.SourceMode
-
-/**
- * Example of how to define tests for HBaseSource
- */
-@RunWith(classOf[JUnitRunner])
-class HBaseRowCountTest extends FunSpec with TupleConversions {
-
-  val output = "/tmp/testOutput"
-
-  val log = LoggerFactory.getLogger(this.getClass.getName)
-
-  val sampleData = List(
-    List("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", "a", "b"),
-    List("sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU", "a", "b")
-  )
-
-  JobTest("sandcrawler.HBaseRowCountJob")
-    .arg("test", "")
-    .arg("app.conf.path", "app.conf")
-    .arg("output", output)
-    .arg("debug", "true")
-    .source[Tuple](
-      new HBaseSource(
-        //"table_name",
-        //"quorum_name:2181",
-        "wbgrp-journal-extract-0-qa",
-        "mtrcs-zk1.us.archive.org:2181",
-        new Fields("key"),
-        List("file"),
-        List(new Fields("size", "mimetype")),
-        sourceMode = SourceMode.GET_LIST, keyList = List("sha1:K2DKSSVTXWPRMFDTWSTCQW3RVWRIOV3Q", "sha1:C3YNNEGH5WAG5ZAAXWAEBNXJWT6CZ3WU")),
-      sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(Bytes.toBytes(s))}):_*)))
-    .sink[Tuple](Tsv(output format "get_list")) {
-      outputBuffer =>
-        log.debug("Output => " + outputBuffer)
-
-        it("should return the test data provided.") {
-          println("outputBuffer.size => " + outputBuffer.size)
-          assert(outputBuffer.size === 2)
-        }
-    }
-    .run
-    .finish
-
-}