
Commit

Roll back non-MiMa-related changes (they'll go in later).
JoshRosen committed Mar 11, 2016
1 parent dae4725 commit 373fd52
Showing 2 changed files with 8 additions and 55 deletions.
@@ -144,38 +144,10 @@ List<String> buildClassPath(String appClassPath) throws IOException {
     boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
     if (prependClasses || isTesting) {
       String scala = getScalaVersion();
-      // All projects except assemblies:
-      List<String> projects = Arrays.asList(
-        "common/network-common",
-        "common/network-shuffle",
-        "common/network-yarn",
-        "common/sketch",
-        "common/tags",
-        "common/unsafe",
-        "core",
-        "examples",
-        "external/akka",
-        "external/docker-integration-tests",
-        "external/flume",
-        "external/flume-sink",
-        "external/kafka",
-        "external/kinesis-asl",
-        "external/mqtt",
-        "external/spark-ganglia-lgpl",
-        "external/twitter",
-        "external/zeromq",
-        "graphx",
-        "launcher",
-        "mllib",
-        "repl",
-        "sql/catalyst",
-        "sql/core",
-        "sql/hive",
-        "sql/hive-thriftserver",
-        "streaming",
-        "tools",
-        "yarn"
-      );
+      List<String> projects = Arrays.asList("core", "repl", "mllib", "graphx",
+        "streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
+        "yarn", "launcher",
+        "common/network-common", "common/network-shuffle", "common/network-yarn");
       if (prependClasses) {
         if (!isTesting) {
           System.err.println(
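Note: the restored (shorter) project list feeds the classpath-prepending logic further down in buildClassPath, which is collapsed in this view. A minimal Python sketch of the general pattern, assuming the usual <module>/target/scala-<version>/classes build layout; the function and variable names here are illustrative and not taken from the launcher's Java code:

import os

def prepended_class_dirs(spark_home, scala_version, projects):
    # Illustrative only: collect the per-project compiled-classes directories
    # that would be placed ahead of the packaged jars on the classpath.
    dirs = []
    for project in projects:
        candidate = os.path.join(
            spark_home, project, "target", "scala-%s" % scala_version, "classes")
        if os.path.isdir(candidate):
            dirs.append(candidate)
    return dirs

# Hypothetical usage:
# prepended_class_dirs("/path/to/spark", "2.11", ["core", "repl", "mllib"])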
python/run-tests.py (27 changes: 4 additions & 23 deletions)
@@ -54,27 +54,10 @@ def print_red(text):
 LOGGER = logging.getLogger()
 
 
-def get_spark_dist_classpath():
-    original_working_dir = os.getcwd()
-    os.chdir(SPARK_HOME)
-    cp = subprocess_check_output(
-        ["./build/sbt", "export assembly/managedClasspath"], universal_newlines=True)
-    cp = cp.strip().split("\n")[-1]
-    os.chdir(original_working_dir)
-    return cp
-
-
-def run_individual_python_test(test_name, pyspark_python, spark_dist_classpath):
+def run_individual_python_test(test_name, pyspark_python):
     env = dict(os.environ)
-    env.update({
-        # Setting SPARK_DIST_CLASSPATH is a simple way to make sure that any child processes
-        # launched by the tests have access to the correct test-time classpath.
-        'SPARK_DIST_CLASSPATH': spark_dist_classpath,
-        'SPARK_TESTING': '1',
-        'SPARK_PREPEND_CLASSES': '1',
-        'PYSPARK_PYTHON': which(pyspark_python),
-        'PYSPARK_DRIVER_PYTHON': which(pyspark_python),
-    })
+    env.update({'SPARK_TESTING': '1', 'PYSPARK_PYTHON': which(pyspark_python),
+                'PYSPARK_DRIVER_PYTHON': which(pyspark_python)})
     LOGGER.debug("Starting test(%s): %s", pyspark_python, test_name)
     start_time = time.time()
     try:
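Note: the remainder of run_individual_python_test is collapsed in this view; it launches the named test module in a child process that inherits the environment built above. A minimal sketch of that pattern, where launch_test and the bin/pyspark invocation are assumptions for illustration rather than the file's actual continuation:

import os
import subprocess

def launch_test(spark_home, test_name, env):
    # Illustrative only: run one pyspark test module in a child process that
    # sees SPARK_TESTING, PYSPARK_PYTHON, and PYSPARK_DRIVER_PYTHON from env.
    cmd = [os.path.join(spark_home, "bin", "pyspark")] + test_name.split()
    proc = subprocess.Popen(cmd, env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    return proc.returncode, out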
@@ -192,16 +175,14 @@ def main():
                 priority = 100
             task_queue.put((priority, (python_exec, test_goal)))
 
-    spark_dist_classpath = get_spark_dist_classpath()
-
     def process_queue(task_queue):
         while True:
             try:
                 (priority, (python_exec, test_goal)) = task_queue.get_nowait()
             except Queue.Empty:
                 break
             try:
-                run_individual_python_test(test_goal, python_exec, spark_dist_classpath)
+                run_individual_python_test(test_goal, python_exec)
             finally:
                 task_queue.task_done()
 
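Note: the task_queue drained by process_queue holds (priority, task) tuples and is presumably a Queue.PriorityQueue shared by worker threads, so entries with a lower priority number are dequeued first. A small self-contained sketch of that producer/consumer shape, using Python 2's Queue module as in the hunk above; the queue entries are made-up examples:

import Queue
import threading

task_queue = Queue.PriorityQueue()
# Hypothetical entries: (priority, (python executable, test goal)).
for priority, goal in [(0, "pyspark.tests"), (100, "pyspark.sql.functions")]:
    task_queue.put((priority, ("python2.7", goal)))

def worker():
    while True:
        try:
            priority, (python_exec, test_goal) = task_queue.get_nowait()
        except Queue.Empty:
            break
        try:
            print("would run %s under %s" % (test_goal, python_exec))
        finally:
            task_queue.task_done()

threads = [threading.Thread(target=worker) for _ in range(2)]
for t in threads:
    t.start()
task_queue.join()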
