Diffstat (limited to 'scald-mvp/src')
-rw-r--r--   scald-mvp/src/main/scala/example/SimpleHBaseSourceExample.scala       35
-rw-r--r--   scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala    58
2 files changed, 93 insertions, 0 deletions
diff --git a/scald-mvp/src/main/scala/example/SimpleHBaseSourceExample.scala b/scald-mvp/src/main/scala/example/SimpleHBaseSourceExample.scala
new file mode 100644
index 0000000..697805d
--- /dev/null
+++ b/scald-mvp/src/main/scala/example/SimpleHBaseSourceExample.scala
@@ -0,0 +1,35 @@
+package example
+
+import com.twitter.scalding.{Tsv, Args}
+import parallelai.spyglass.base.JobBase
+import org.apache.log4j.{Level, Logger}
+import parallelai.spyglass.hbase.{HBasePipeConversions, HBaseSource}
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+import cascading.tuple.Fields
+import cascading.property.AppProps
+import java.util.Properties
+
+/**
+ * Simple example of HBaseSource usage
+ */
+class SimpleHBaseSourceExample(args: Args) extends JobBase(args) with HBasePipeConversions {
+
+  val isDebug: Boolean = args("debug").toBoolean
+
+  if (isDebug) Logger.getRootLogger.setLevel(Level.DEBUG)
+
+  val output = args("output")
+
+  val hbs = new HBaseSource(
+    "table_name",
+    "quorum_name:2181",
+    new Fields("key"),
+    List("column_family"),
+    List(new Fields("column_name1", "column_name2")),
+    sourceMode = SourceMode.GET_LIST, keyList = List("1", "2", "3"))
+    .read
+    .debug
+    .fromBytesWritable(new Fields("key", "column_name1", "column_name2"))
+    .write(Tsv(output format "get_list"))
+
+}
diff --git a/scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala b/scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala
new file mode 100644
index 0000000..d416af8
--- /dev/null
+++ b/scald-mvp/src/test/scala/example/SimpleHBaseSourceExampleTest.scala
@@ -0,0 +1,58 @@
+package example
+
+import org.junit.runner.RunWith
+import com.twitter.scalding.{JobTest, TupleConversions}
+import org.scalatest.FunSpec
+import org.scalatest.junit.JUnitRunner
+import org.slf4j.LoggerFactory
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable
+import cascading.tuple.{Tuple, Fields}
+import org.apache.hadoop.hbase.util.Bytes
+import scala._
+import com.twitter.scalding.Tsv
+import parallelai.spyglass.hbase.HBaseSource
+import parallelai.spyglass.hbase.HBaseConstants.SourceMode
+
+/**
+ * Example of how to define tests for HBaseSource
+ */
+@RunWith(classOf[JUnitRunner])
+class SimpleHBaseSourceExampleTest extends FunSpec with TupleConversions {
+
+  val output = "/tmp/testOutput"
+
+  val log = LoggerFactory.getLogger(this.getClass.getName)
+
+  val sampleData = List(
+    List("1", "kk1", "pp1"),
+    List("2", "kk2", "pp2"),
+    List("3", "kk3", "pp3")
+  )
+
+  JobTest("example.SimpleHBaseSourceExample")
+    .arg("test", "")
+    .arg("app.conf.path", "app.conf")
+    .arg("output", output)
+    .arg("debug", "true")
+    .source[Tuple](
+      new HBaseSource(
+        "table_name",
+        "quorum_name:2181",
+        new Fields("key"),
+        List("column_family"),
+        List(new Fields("column_name1", "column_name2")),
+        sourceMode = SourceMode.GET_LIST, keyList = List("1", "2", "3")),
+      sampleData.map(l => new Tuple(l.map(s => {new ImmutableBytesWritable(Bytes.toBytes(s))}):_*)))
+    .sink[Tuple](Tsv(output format "get_list")) {
+      outputBuffer =>
+        log.debug("Output => " + outputBuffer)
+
+        it("should return the test data provided.") {
+          println("outputBuffer.size => " + outputBuffer.size)
+          assert(outputBuffer.size === 3)
+        }
+    }
+    .run
+    .finish
+
+}
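
Note: the committed job reads three rows by key (SourceMode.GET_LIST) from HBase through SpyGlass and writes them to a TSV sink. Outside of JobTest, a Scalding job like this is normally launched through com.twitter.scalding.Tool on a Hadoop classpath. The sketch below is illustrative only and not part of this commit: the runner object, the output path, and the --app.conf.path value are assumptions, while the --output and --debug flags correspond to the args(...) lookups in SimpleHBaseSourceExample above.

    // Illustrative launcher sketch (not part of this commit); assumes the job and
    // its SpyGlass/HBase dependencies are already on the Hadoop classpath.
    package example

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.util.ToolRunner
    import com.twitter.scalding.Tool

    object SimpleHBaseSourceExampleRunner {
      def main(cliArgs: Array[String]): Unit = {
        ToolRunner.run(new Configuration, new Tool, Array(
          "example.SimpleHBaseSourceExample", // job class, instantiated by Scalding via reflection
          "--hdfs",                           // run against the cluster rather than --local
          "--output", "/tmp/hbase_example",   // read by args("output") in the job (placeholder path)
          "--debug", "false",                 // read by args("debug") in the job
          "--app.conf.path", "app.conf"))     // SpyGlass JobBase configuration, mirroring the test args
      }
    }

Equivalently, with an assembled jar the same arguments could be passed on the command line, e.g. hadoop jar scald-mvp-assembly.jar com.twitter.scalding.Tool example.SimpleHBaseSourceExample --hdfs --output /tmp/hbase_example --debug false --app.conf.path app.conf (the jar name here is a placeholder).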