
Merge branch 'master' into mos-shuffle-tracked

Conflicts:
	.gitignore
	core/src/main/scala/spark/LocalFileShuffle.scala
	src/scala/spark/BasicLocalFileShuffle.scala
	src/scala/spark/Broadcast.scala
	src/scala/spark/LocalFileShuffle.scala
2 parents: dfbc5af + 9d2d533. Commit ac7e066383a6878beb0618597c2be6fa9eb1982e, committed by @mosharaf on Apr 27, 2011.
Showing 304 changed files with 103,106 additions and 4,645 deletions.
.gitignore
@@ -1,7 +1,26 @@
*~
*.swp
-build
-work
+*.iml
+.idea/
+/build/
+work/
+out/
.DS_Store
third_party/libmesos.so
third_party/libmesos.dylib
+conf/java-opts
+conf/spark-env.sh
+conf/log4j.properties
+target/
+reports/
+.project
+.classpath
+.scala_dependencies
+lib_managed/
+src_managed/
+project/boot/
+project/plugins/project/build.properties
+project/build/target/
+project/plugins/target/
+project/plugins/lib_managed/
+project/plugins/src_managed/
LICENSE (new file, 27 lines)
@@ -0,0 +1,27 @@
+Copyright (c) 2010, Regents of the University of California.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the University of California, Berkeley nor the
+ names of its contributors may be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Makefile (deleted)
@@ -1,79 +0,0 @@
-EMPTY =
-SPACE = $(EMPTY) $(EMPTY)
-
-# Build up classpath by concatenating some strings
-JARS = third_party/mesos.jar
-JARS += third_party/asm-3.2/lib/all/asm-all-3.2.jar
-JARS += third_party/colt.jar
-JARS += third_party/guava-r07/guava-r07.jar
-JARS += third_party/hadoop-0.20.0/hadoop-0.20.0-core.jar
-JARS += third_party/hadoop-0.20.0/lib/commons-logging-1.0.4.jar
-JARS += third_party/scalatest-1.2/scalatest-1.2.jar
-JARS += third_party/scalacheck_2.8.0-1.7.jar
-JARS += third_party/jetty-7.1.6.v20100715/jetty-server-7.1.6.v20100715.jar
-JARS += third_party/jetty-7.1.6.v20100715/servlet-api-2.5.jar
-JARS += third_party/apache-log4j-1.2.16/log4j-1.2.16.jar
-JARS += third_party/slf4j-1.6.1/slf4j-api-1.6.1.jar
-JARS += third_party/slf4j-1.6.1/slf4j-log4j12-1.6.1.jar
-CLASSPATH = $(subst $(SPACE),:,$(JARS))
-
-SCALA_SOURCES = src/examples/*.scala src/scala/spark/*.scala src/scala/spark/repl/*.scala
-SCALA_SOURCES += src/test/spark/*.scala src/test/spark/repl/*.scala
-
-JAVA_SOURCES = $(wildcard src/java/spark/compress/lzf/*.java)
-
-ifeq ($(USE_FSC),1)
- COMPILER_NAME = fsc
-else
- COMPILER_NAME = scalac
-endif
-
-ifeq ($(SCALA_HOME),)
- COMPILER = $(COMPILER_NAME)
-else
- COMPILER = $(SCALA_HOME)/bin/$(COMPILER_NAME)
-endif
-
-CONF_FILES = conf/spark-env.sh conf/log4j.properties conf/java-opts
-
-all: scala java conf-files
-
-build/classes:
- mkdir -p build/classes
-
-scala: build/classes java
- $(COMPILER) -d build/classes -classpath build/classes:$(CLASSPATH) $(SCALA_SOURCES)
-
-java: $(JAVA_SOURCES) build/classes
- javac -d build/classes $(JAVA_SOURCES)
-
-native: java
- $(MAKE) -C src/native
-
-jar: build/spark.jar build/spark-dep.jar
-
-dep-jar: build/spark-dep.jar
-
-build/spark.jar: scala java
- jar cf build/spark.jar -C build/classes spark
-
-build/spark-dep.jar:
- mkdir -p build/dep
- cd build/dep && for i in $(JARS); do jar xf ../../$$i; done
- jar cf build/spark-dep.jar -C build/dep .
-
-conf-files: $(CONF_FILES)
-
-$(CONF_FILES): %: | %.template
- cp $@.template $@
-
-test: all
- ./alltests
-
-default: all
-
-clean:
- $(MAKE) -C src/native clean
- rm -rf build
-
-.phony: default all clean scala java native jar dep-jar conf-files
README (28 changed lines)
@@ -1,24 +1,32 @@
+ONLINE DOCUMENTATION
+
+You can find the latest Spark documentation, including a programming guide,
+on the project wiki at http://github.com/mesos/spark/wiki. This file only
+contains basic setup instructions.
+
+
+
BUILDING
-Spark requires Scala 2.8. This version has been tested with 2.8.0.final.
+Spark requires Scala 2.8. This version has been tested with 2.8.1.final.
-To build and run Spark, you will need to have Scala's bin in your $PATH,
-or you will need to set the SCALA_HOME environment variable to point
-to where you've installed Scala. Scala must be accessible through one
-of these methods on Mesos slave nodes as well as on the master.
+The project is built using Simple Build Tool (SBT), which is packaged with it.
+To build Spark and its example programs, run sbt/sbt compile.
-To build Spark and the example programs, run make.
+To run Spark, you will need to have Scala's bin in your $PATH, or you
+will need to set the SCALA_HOME environment variable to point to where
+you've installed Scala. Scala must be accessible through one of these
+methods on Mesos slave nodes as well as on the master.
To run one of the examples, use ./run <class> <params>. For example,
-./run SparkLR will run the Logistic Regression example. Each of the
-example programs prints usage help if no params are given.
+./run spark.examples.SparkLR will run the Logistic Regression example.
+Each of the example programs prints usage help if no params are given.
All of the Spark samples take a <host> parameter that is the Mesos master
to connect to. This can be a Mesos URL, or "local" to run locally with one
thread, or "local[N]" to run locally with N threads.
-Tip: If you are building Spark and examples repeatedly, export USE_FSC=1
-to have the Makefile use the fsc compiler daemon instead of scalac.
+
CONFIGURATION
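
To make the <host> argument in the run instructions above concrete, here is a minimal driver in the style of the bundled example programs. It is a sketch only: LocalCountExample is a hypothetical name (not a program shipped with Spark), and the SparkContext(master, jobName) constructor and the parallelize/filter/count calls are assumptions about the 2011-era API.

    import spark.SparkContext

    // Hypothetical example program; NOT spark.examples.SparkLR.
    // The SparkContext(master, jobName) constructor and the
    // parallelize/filter/count calls are assumed from the 2011-era API.
    object LocalCountExample {
      def main(args: Array[String]) {
        if (args.length < 1) {
          System.err.println("Usage: LocalCountExample <host>")
          System.exit(1)
        }
        // <host> may be a Mesos URL, "local", or "local[N]", as described above.
        val sc = new SparkContext(args(0), "LocalCountExample")
        // Distribute a small range over 2 slices and count the even numbers.
        val evens = sc.parallelize(1 to 1000, 2).filter(_ % 2 == 0).count()
        println("Number of even values: " + evens)
      }
    }

If compiled onto the classpath, it would be launched the same way as the shipped examples, e.g. ./run LocalCountExample local[2].
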
alltests (deleted)
@@ -1,11 +0,0 @@
-#!/bin/bash
-FWDIR="`dirname $0`"
-if [ "x$SPARK_MEM" == "x" ]; then
- export SPARK_MEM=500m
-fi
-RESULTS_DIR="$FWDIR/build/test_results"
-if [ -d $RESULTS_DIR ]; then
- rm -r $RESULTS_DIR
-fi
-mkdir -p $RESULTS_DIR
-$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/build/classes -u $RESULTS_DIR -o $@
conf/java-opts
@@ -1,4 +1,4 @@
--Dspark.shuffle.class=spark.TrackedCustomBlockedInMemoryShuffle
+-Dspark.shuffle.class=spark.CustomBlockedInMemoryShuffle
-Dspark.shuffle.masterHostAddress=127.0.0.1
-Dspark.shuffle.masterTrackerPort=22222
-Dspark.shuffle.trackerStrategy=spark.BalanceRemainingShuffleTrackerStrategy
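
Since these flags are plain JVM system properties, the selected shuffle implementation and tracker settings can be recovered inside the JVM with System.getProperty. The sketch below is illustrative only: apart from the property names and values visible in the diff above, the object name and the LocalFileShuffle default are assumptions, not Spark's actual internals.

    // Hedged sketch: reading the settings that the java-opts file passes
    // as -D system properties. Everything not shown in the diff above
    // (object name, defaults) is an assumption.
    object ShuffleSettingsExample {
      def main(args: Array[String]) {
        val shuffleClass = System.getProperty("spark.shuffle.class",
                                              "spark.LocalFileShuffle")
        val trackerHost  = System.getProperty("spark.shuffle.masterHostAddress", "127.0.0.1")
        val trackerPort  = System.getProperty("spark.shuffle.masterTrackerPort", "22222").toInt
        val strategy     = System.getProperty("spark.shuffle.trackerStrategy",
                                              "spark.BalanceRemainingShuffleTrackerStrategy")
        // A pluggable implementation would typically be instantiated by name:
        //   val shuffle = Class.forName(shuffleClass).newInstance()
        println("shuffle=" + shuffleClass + ", tracker=" + trackerHost + ":" + trackerPort +
                ", strategy=" + strategy)
      }
    }
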
conf/spark-env.sh
@@ -10,4 +10,4 @@
# be in the same format as the JVM's -Xmx option, e.g. 300m or 1g).
# - SPARK_LIBRARY_PATH, to add extra search paths for native libraries.
-MESOS_HOME=/home/mosharaf/Work/mesos
+MESOS_HOME=/Users/mosharaf/Work/mesos
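
These are ordinary environment variables consumed by the launcher scripts. As a purely illustrative sketch (the real consumers are shell scripts; the 500m default is borrowed from the deleted alltests script above, and the object name is made up), they could be read from a JVM like this:

    // Hedged sketch: consuming the variables that spark-env.sh exports.
    // Only SPARK_MEM, SPARK_LIBRARY_PATH and MESOS_HOME come from the
    // diffs above; everything else here is illustrative.
    object EnvSettingsExample {
      private def envOr(name: String, default: String): String = {
        val value = System.getenv(name)
        if (value == null || value.isEmpty) default else value
      }

      def main(args: Array[String]) {
        val sparkMem    = envOr("SPARK_MEM", "500m")      // same format as -Xmx, e.g. 300m or 1g
        val libraryPath = envOr("SPARK_LIBRARY_PATH", "") // extra native library search paths
        val mesosHome   = envOr("MESOS_HOME", "")         // where libmesos.so / libmesos.dylib live
        println("SPARK_MEM=" + sparkMem +
                " SPARK_LIBRARY_PATH=" + libraryPath +
                " MESOS_HOME=" + mesosHome)
      }
    }
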
File renamed without changes.
File renamed without changes.
@@ -0,0 +1,11 @@
+Copyright 2009-2010 Ning, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
Binary file not shown.
File renamed without changes.
File renamed without changes.
(The remaining changed files in this commit are not shown.)