Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 48 additions & 0 deletions cscs-checks/apps/spark/spark_check.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import math

import reframe as rfm
import reframe.utility.sanity as sn
from reframe.core.launchers.registry import getlauncher


@rfm.simple_test
class SparkCheck(rfm.RunOnlyRegressionTest):
    """Compute pi with the SparkPi example and validate it against math.pi.

    The test brings up a standalone Spark cluster on the allocated nodes
    (``start-all.sh`` / ``stop-all.sh``) and submits the SparkPi example
    bundled with the Spark installation via ``spark-submit``.
    """

    def __init__(self):
        self.descr = 'Simple calculation of pi with Spark'
        self.valid_systems = ['daint:gpu', 'daint:mc',
                              'dom:gpu', 'dom:mc']
        self.valid_prog_environs = ['PrgEnv-gnu']
        self.modules = ['Spark']
        self.sourcesdir = None
        # Start/stop the standalone Spark cluster around the actual run.
        self.pre_run = ['start-all.sh']
        self.post_run = ['stop-all.sh']
        self.num_tasks = 2
        self.num_tasks_per_node = 1
        # Sanity: extract the computed value of pi from stdout and accept
        # the run if it is within 0.01 of math.pi.
        pi_value = sn.extractsingle(r'Pi is roughly\s+(?P<pi>\S+)',
                                    self.stdout, 'pi', float)
        self.sanity_patterns = sn.assert_lt(sn.abs(pi_value - math.pi), 0.01)
        self.maintainers = ['TM', 'TR']
        self.tags = {'production'}

    @rfm.run_before('run')
    def prepare_run(self):
        """Size the Spark workers/executors and build the submit command.

        The sizing depends on the current partition: 12 cores per worker
        on the GPU nodes, 36 on the multicore nodes; executors get a
        quarter of the worker cores in both cases.
        """
        if self.current_partition.fullname in ['daint:gpu', 'dom:gpu']:
            num_workers = 12
            exec_cores = 3
        else:
            num_workers = 36
            exec_cores = 9

        self.variables = {
            'SPARK_WORKER_CORES': str(num_workers),
            'SPARK_LOCAL_DIRS': '"/tmp"',
        }
        self.executable = (
            'spark-submit --conf spark.default.parallelism=%s '
            '--conf spark.executor.cores=%s --conf spark.executor.memory=15g '
            '--master $SPARKURL --class org.apache.spark.examples.SparkPi '
            '$EBROOTSPARK/examples/jars/spark-examples_2.11-2.3.1.jar 10000;'
            % (num_workers, exec_cores))
        # The job launcher has to be changed since the `spark-submit`
        # script is not used with srun.
        self.job.launcher = getlauncher('local')()