From cf9166c626a5b9c0ab536591cca240cba8b107fb Mon Sep 17 00:00:00 2001
From: Oscar Boykin
Date: Fri, 24 Feb 2012 13:43:17 -0800
Subject: [PATCH] Updates to cascading wip-234

---
 build.sbt                                         | 8 ++++----
 src/main/scala/com/twitter/scalding/Source.scala  | 9 ++++-----
 2 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/build.sbt b/build.sbt
index 9fba287e27..26c08d27bf 100644
--- a/build.sbt
+++ b/build.sbt
@@ -10,15 +10,15 @@ scalaVersion := "2.8.1"
 
 resolvers += "Concurrent Maven Repo" at "http://conjars.org/repo"
 
-libraryDependencies += "cascading" % "cascading-core" % "2.0.0-wip-227"
+libraryDependencies += "cascading" % "cascading-core" % "2.0.0-wip-234"
 
-libraryDependencies += "cascading" % "cascading-local" % "2.0.0-wip-227"
+libraryDependencies += "cascading" % "cascading-local" % "2.0.0-wip-234"
 
-libraryDependencies += "cascading" % "cascading-hadoop" % "2.0.0-wip-227"
+libraryDependencies += "cascading" % "cascading-hadoop" % "2.0.0-wip-234"
 
 libraryDependencies += "cascading.kryo" % "cascading.kryo" % "0.2.1"
 
-libraryDependencies += "com.twitter" % "meat-locker" % "0.1.5"
+libraryDependencies += "com.twitter" % "meat-locker" % "0.1.6"
 
 libraryDependencies += "commons-lang" % "commons-lang" % "2.4"
 
diff --git a/src/main/scala/com/twitter/scalding/Source.scala b/src/main/scala/com/twitter/scalding/Source.scala
index 1ba5e9262f..19722694ca 100644
--- a/src/main/scala/com/twitter/scalding/Source.scala
+++ b/src/main/scala/com/twitter/scalding/Source.scala
@@ -151,7 +151,7 @@ abstract class Source extends java.io.Serializable {
   }
 
   protected def createHadoopTestReadTap(buffer : Iterable[Tuple]) :
-    Tap[HadoopFlowProcess, JobConf, RecordReader[_,_], OutputCollector[_,_]] = {
+    Tap[HadoopFlowProcess, JobConf, RecordReader[_,_], _] = {
     new MemorySourceTap(buffer.toList.asJava, hdfsScheme.getSourceFields())
   }
 
@@ -205,7 +205,7 @@ abstract class Source extends java.io.Serializable {
   }
 
   protected def createHdfsReadTap(hdfsMode : Hdfs) :
-    Tap[HadoopFlowProcess, JobConf, RecordReader[_,_], OutputCollector[_,_]] = {
+    Tap[HadoopFlowProcess, JobConf, RecordReader[_,_], _] = {
     val goodPaths = if (hdfsMode.sourceStrictness) {
       //we check later that all the paths are good
       hdfsPaths
@@ -223,12 +223,11 @@ abstract class Source extends java.io.Serializable {
         new Hfs(hdfsScheme, hdfsPaths.head, SinkMode.KEEP)
       }
       case 1 => taps.head
-      case _ => new MultiSourceTap[HadoopFlowProcess,
-        JobConf, RecordReader[_,_], OutputCollector[_,_]](taps.toSeq : _*)
+      case _ => new MultiSourceTap[Hfs, HadoopFlowProcess, JobConf, RecordReader[_,_]]( taps.toSeq : _*)
     }
   }
 
   protected def createHdfsWriteTap(hdfsMode : Hdfs) :
-    Tap[HadoopFlowProcess, JobConf, RecordReader[_,_], OutputCollector[_,_]] = {
+    Tap[HadoopFlowProcess, JobConf, _, OutputCollector[_,_]] = {
     new Hfs(hdfsScheme, hdfsWritePath, SinkMode.REPLACE)
   }