From 9396346ad988988da3fd4b42aa45266657211cca Mon Sep 17 00:00:00 2001
From: Lianhui Wang
Date: Tue, 28 Apr 2015 00:19:31 +0800
Subject: [PATCH] put zip to make-distribution.sh

---
 make-distribution.sh     |  1 +
 project/SparkBuild.scala | 12 ++----------
 2 files changed, 3 insertions(+), 10 deletions(-)

diff --git a/make-distribution.sh b/make-distribution.sh
index 738a9c4d69601..c9a26d78239b2 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -228,6 +228,7 @@
 cp "$SPARK_HOME"/conf/*.template "$DISTDIR"/conf
 cp "$SPARK_HOME/README.md" "$DISTDIR"
 cp -r "$SPARK_HOME/bin" "$DISTDIR"
 cp -r "$SPARK_HOME/python" "$DISTDIR"
+zip -r "$DISTDIR"/python/lib/pyspark.zip "$SPARK_HOME"/python/lib/pyspark
 cp -r "$SPARK_HOME/sbin" "$DISTDIR"
 cp -r "$SPARK_HOME/ec2" "$DISTDIR"

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 3bd70dc0f6af1..09b4976d10c26 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -361,20 +361,12 @@ object PySparkAssembly {
     // to be included in the assembly. We can't just add "python/" to the assembly's resource dir
     // list since that will copy unneeded / unwanted files.
     resourceGenerators in Compile <+= resourceManaged in Compile map { outDir: File =>
-      val src = new File(BuildCommons.sparkHome, "python/pyspark")
-
-      val zipFile = new File(BuildCommons.sparkHome , "python/lib/pyspark.zip")
-      IO.delete(zipFile)
-      def entries(f: File):List[File] =
-        f :: (if (f.isDirectory) IO.listFiles(f).toList.flatMap(entries(_)) else Nil)
-      IO.zip(entries(src).map(
-        d => (d, d.getAbsolutePath.substring(src.getParent.length +1))),
-        zipFile)
-
       val dst = new File(outDir, "pyspark")
       if (!dst.isDirectory()) {
         require(dst.mkdirs())
       }
+
+      val src = new File(BuildCommons.sparkHome, "python/pyspark")
       copy(src, dst)
     }
   )
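
Note: for reference, a minimal sketch of an equivalent zip step for make-distribution.sh, assuming the pyspark sources sit at "$SPARK_HOME"/python/pyspark (the path the removed SparkBuild.scala generator zipped) and that the archive should contain entries rooted at pyspark/, the way IO.zip built them by stripping src.getParent from each path:

    # Create python/lib/pyspark.zip in the distribution with entry names relative
    # to python/, so the archive unpacks as pyspark/... rather than embedding the
    # build machine's absolute $SPARK_HOME path. $DISTDIR/python/lib already exists
    # here because "cp -r $SPARK_HOME/python $DISTDIR" runs just above.
    (cd "$SPARK_HOME"/python && zip -q -r "$DISTDIR"/python/lib/pyspark.zip pyspark)

As written, the added line points zip at "$SPARK_HOME"/python/lib/pyspark rather than "$SPARK_HOME"/python/pyspark, and zip invoked on an absolute path stores that full path (minus the leading slash) inside the archive. The cd-in-a-subshell form above keeps the entry names relative, matching the layout the deleted sbt code produced.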