# Microphysics-gpu.ini
[main]
testTopDir = /raid/testing/microphysics-gpu/
webTopDir = /raid/www/Microphysics/test-suite/gpu/
sourceTree = C_Src
numMakeJobs = 20
suiteName = Microphysics-gpu
goUpLink = 1
reportActiveTestsOnly = 1
COMP = gnu
add_to_c_make_command = USE_CUDA=TRUE CUDA_ARCH=70 TINY_PROFILE=TRUE
use_ctools = 0
purge_output = 1
summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK
#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1
# MPIcommand should use the placeholders:
#   @host@    where to put the hostname to run on
#   @nprocs@  where to put the number of processors
#   @command@ where to put the command to run
#
# Only tests with useMPI = 1 will run in parallel; nprocs is problem
# dependent and is specified in the individual problem sections.
MPIcommand = mpiexec -n @nprocs@ @command@
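# For illustration only: a hypothetical MPIcommand that also fills in the
# hostname placeholder (the exact host-selection flag depends on the local
# MPI launcher, so treat this as a sketch, not the command used here):
#   MPIcommand = mpiexec -host @host@ -n @nprocs@ @command@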
default_branch = development
# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/Microphysics/test-suite/gpu/
# slack
slack_post = 1
slack_webhookfile = /raid/testing/.slack.webhook
slack_channel = "#gpu"
slack_username = "i am groot"
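# Repository sections: each gives the local checkout directory and the git
# branch the suite builds against.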
[AMReX]
dir = /raid/testing/microphysics-gpu/amrex
branch = development
[source]
dir = /raid/testing/microphysics-gpu/Microphysics
branch = development
#branch = new_memory_subch
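# Individual unit tests: each section names its build directory, inputs file,
# and the extra make variables (EOS_DIR, NETWORK_DIR, INTEGRATOR_DIR) passed
# through addToCompileString, plus any runtime parameters and benchmark file.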
[test_eos_C-helmholtz]
buildDir = unit_test/test_eos
inputFile = input_eos
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = EOS_DIR=helmholtz
compareFile = test_eos.helmholtz.cxx
runtime_params = do_cxx=1
[test_aprox_rates_C]
buildDir = unit_test/test_aprox_rates
inputFile = input_aprox_rates
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox19 INTEGRATOR_DIR=VODE
compareFile = test_aprox_rates.helmholtz.cxx
runtime_params = do_cxx=1
[test_screening_C]
buildDir = unit_test/test_screening/
inputFile = inputs
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox21 INTEGRATOR_DIR=VODE
compareFile = test_screening.screen5
runtime_params = do_cxx=1
[test_react_aprox13_C]
buildDir = unit_test/test_react/
inputFile = inputs_aprox13
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
addToCompileString = NETWORK_DIR=aprox13 INTEGRATOR_DIR=VODE
compareFile = react_aprox13_test_react.VODE.cxx
runtime_params = do_cxx=1
[test_react_C-nova]
buildDir = unit_test/test_react/
inputFile = inputs_nova
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 0
numthreads = 4
addToCompileString = NETWORK_DIR=nova
compareFile = react_nova_test_react.VODE.cxx
[test_react_C-subch_approx-VODE]
buildDir = unit_test/test_react/
inputFile = inputs_aprox21
dim = 3
link1File = helm_table.dat
useMPI = 0
useOMP = 1
numthreads = 4
addToCompileString = NETWORK_DIR=subch_approx INTEGRATOR_DIR=VODE
compareFile = react_subch_approx_test_react.VODE.cxx
runtime_params = prefix=react_subch_approx_ integrator.jacobian=1