Skip to content

Commit

Permalink
updated job tests
Browse files Browse the repository at this point in the history
I think all job tests that need to be performed now work. The server-side tests are tricky; I'm not sure yet how to do those.
  • Loading branch information
louisponet committed Oct 15, 2017
1 parent 43b3dd2 commit e08c1ac
Show file tree
Hide file tree
Showing 4 changed files with 61 additions and 22 deletions.
4 changes: 3 additions & 1 deletion notes.txt
Original file line number Diff line number Diff line change
@@ -1 +1,3 @@
TODO: redo all the Documentation!
TODO:
->fix atom issue in file processing!
->redo all the Documentation!
9 changes: 8 additions & 1 deletion src/file_processing.jl
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,13 @@ function read_qe_kpdos(filename::String,column=1;fermi=0)
return zmat',(ytickvals,yticks)
end


#Incomplete for now only allows for 1 atom of the same kind!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#Incomplete for now only allows for 1 atom of the same kind!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#Incomplete for now only allows for 1 atom of the same kind!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#Incomplete for now only allows for 1 atom of the same kind!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#Incomplete for now only allows for 1 atom of the same kind!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#Incomplete for now only allows for 1 atom of the same kind!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
"""
Reads a Quantum Espresso input file.
Returns a DFInput.
Expand Down Expand Up @@ -386,7 +393,7 @@ function write_job_files(df_job::DFJob)
write(f,"#!/bin/bash\n","#SBATCH -N 1\n","#SBATCH --ntasks-per-node=24 \n","#SBATCH --time=24:00:00 \n","#SBATCH -J $(df_job.job_name) \n",
"#SBATCH -p defpart\n\n","module load open-mpi/gcc/1.10.2\n","module load mkl/2016.1.056\n","\n")
for (i,(run_command,input)) in enumerate(df_job.flow)
filename = "$i"*df_job.job_name*"_$input"
filename = "$i"*df_job.job_name*"_$input"*".in"
push!(new_filenames,filename)
write_df_input(df_job.home_dir*filename,df_job.calculations[input])
write(f,"mpirun -np 24 $run_command <$filename> $(split(filename,".")[1]).out \n")
Expand Down
49 changes: 34 additions & 15 deletions src/job_control.jl
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ function load_qe_job(job_name::String,df_job_dir::String,T=Float32;inputs=nothin
t_calcs = Dict{String,DFInput}()
flow = Array{Tuple{String,String},1}()
for (run_command,file) in inputs
t_calcs[split(file,"_")[end]] = read_qe_input(df_job_dir*file,T)
push!(flow,(run_command,split(file,"_")[end]))
t_calcs[split(file,"_")[end][1:end-3]] = read_qe_input(df_job_dir*file,T)
push!(flow,(run_command,split(file,"_")[end][1:end-3]))
end
if new_homedir!=nothing
return DFJob(job_name,t_calcs,flow,new_homedir,server,server_dir)
Expand All @@ -47,7 +47,7 @@ end

#---------------------------------BEGINNING GENERAL SECTION ---------------------#

#@TODO should we also create a config file for each job with stuff like server etc? and other config things,
#TODO should we also create a config file for each job with stuff like server etc? and other config things,
# which if not supplied could contain the default stuff?
"""
Pulls job from server. If no specific inputs are supplied it pulls all .in and .tt files.
Expand Down Expand Up @@ -88,7 +88,7 @@ function save_job(df_job::DFJob)
write_job_files(df_job)
end

#@Incomplete everything is hardcoded for now still!!!! make it configurable
#Incomplete everything is hardcoded for now still!!!! make it configurable
"""
Pushes a DFJob from its local directory to its server-side directory.
Expand All @@ -105,7 +105,7 @@ function push_job(df_job::DFJob)
end
end

#@TODO only uses qsub for now. how to make it more general?
#TODO only uses qsub for now. how to make it more general?
"""
Submit a DFJob. First saves it locally, pushes it to the server then runs the job file on the server.
Expand Down Expand Up @@ -155,12 +155,12 @@ function check_job_data(df_job,data_keys)
end

"""
Mutatatively change data that is tied to a DFJob. This means that it will run through all the DFInputs and their fieldnames and their Dicts. If it finds a Symbol in one of those that matches a symbol in the new data, it will replace the value of the first symbol with the new value.
change_job_data!(df_job::DFJob,new_data::Dict{Symbol,<:Any})
Input: df_job::DFJob,
new_data::Dict{Symbol,Any}
Mutatively change data that is tied to a DFJob. This means that it will run through all the DFInputs and their fieldnames and their Dicts. If it finds a Symbol in one of those that matches a symbol in the new data, it will replace the value of the first symbol with the new value.
"""
function change_job_data!(df_job::DFJob,new_data::Dict{Symbol,Any})
function change_job_data!(df_job::DFJob,new_data::Dict{Symbol,<:Any})
found_keys = Symbol[]
for (key,calculation) in df_job.calculations
for name in fieldnames(calculation)[2:end]
data_dict = getfield(calculation,name)
Expand All @@ -169,6 +169,7 @@ function change_job_data!(df_job::DFJob,new_data::Dict{Symbol,Any})
for (flag,value) in block
if haskey(new_data,flag)
old_data = value
if !(flag in found_keys) push!(found_keys,flag) end
if typeof(old_data) == typeof(new_data[flag])
block[flag] = new_data[flag]
println("$key:\n -> $block_key:\n -> $flag:\n $old_data changed to: $(new_data[flag])")
Expand All @@ -181,6 +182,7 @@ function change_job_data!(df_job::DFJob,new_data::Dict{Symbol,Any})
else
for (data_key,data_val) in new_data
if haskey(data_dict,data_key)
if !(data_key in found_keys) push!(found_keys,data_key) end
old_data = data_dict[data_key]
if typeof(old_data) == typeof(data_val)
data_dict[data_key] = data_val
Expand All @@ -193,9 +195,17 @@ function change_job_data!(df_job::DFJob,new_data::Dict{Symbol,Any})
end
end
end
for key in found_keys
pop!(new_data,key)
end
if 1 < length(keys(new_data))
println("flags $(String.(collect(keys(new_data)))) were not found in any input file, please set them first!")
elseif length(keys(new_data)) == 1
println("flag '$(String(collect(keys(new_data))[1]))' was not found in any input file, please set it first!")
end
end

#@Incomplete this now assumes that there is only one calculation, would be better to interface with the flow of the DFJob
#Incomplete this now assumes that there is only one calculation, would be better to interface with the flow of the DFJob
"""
Mutatively sets the job data in a calculation block of a DFJob. It will merge the supplied data with the data previously present in the block, changing all previous values to the new ones and adding non-existent ones.
Expand All @@ -204,12 +214,21 @@ Input: df_job::DFJob,
block_symbol::Symbol, -> Symbol of the datablock inside the calculation's input file.
data::Dict{Symbol,Any} -> flags and values to be set.
"""
function set_job_data!(df_job,calculation,block_symbol,data)
function set_job_data!(df_job::DFJob,calculation::String,block_symbol::Symbol,data)
    # Silently do nothing when the named calculation is not part of this job.
    haskey(df_job.calculations,calculation) || return
    calc = df_job.calculations[calculation]
    if block_symbol == :control_blocks
        # `data` maps block name => Dict of flags; merge each flag Dict into the
        # matching existing control block (new values win on key collisions).
        for (block_key,block_dict) in data
            calc.control_blocks[block_key] = merge(calc.control_blocks[block_key],block_dict)
            println("New input of block '$(String(block_key))' in '$(String(block_symbol))' of calculation '$calculation' is now:")
            display(calc.control_blocks[block_key])
            println("\n")
        end
    else
        # Any other field is itself a Dict; merge the new entries straight into it.
        setfield!(calc,block_symbol,merge(getfield(calc,block_symbol),data))
        println("New input of '$block_symbol' in calculation '$calculation' is:\n")
        display(getfield(calc,block_symbol))
        println("\n")
    end
    return
end

Expand Down
21 changes: 16 additions & 5 deletions test/job_control_tests.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
using DFControl, Base.Test
df_job = load_qe_job("test_job",joinpath(@__DIR__,"../assets/inputs/qe"))

df_job2 = load_qe_job("test_job",joinpath(@__DIR__,"../assets/inputs/qe"),new_homedir="blabla")
@test df_job2.home_dir == "blabla/"
@test length(df_job.flow) == 4
@test df_job.flow[3] == ("~/bin/pw.x","bands.in")
@test df_job.flow[3] == ("~/bin/pw.x","bands")
@test df_job.home_dir == joinpath(@__DIR__,"../assets/inputs/qe/")

mkdir(joinpath(@__DIR__,"../assets/inputs/qe/test_dir/"))
Expand Down Expand Up @@ -33,7 +34,17 @@ for file in files_to_remove
end
rm(test_dir)

change_data = Dict(:prefix=>"'test'",:noncolin => false, :ecutwfc=> 35)
change_data = Dict(:sk1=>3,:sk2=>3.2,:prefix=>"'test'",:noncolin => false, :ecutwfc=> 35,:test => true, :ion_dynamics=>true , :kaka=>'d')
change_data2 = Dict(:bleirgh => "'stuff'")
change_job_data!(df_job,change_data)
check_keys = keys(change_data)
@test check_job_data(df_job,check_keys) == change_data
change_job_data!(df_job,change_data2)
check_keys = Symbol[:sk1,:prefix,:noncolin,:ecutwfc]
@test check_job_data(df_job,check_keys) == Dict(:sk1=>3,:prefix=>"'test'",:noncolin => false, :ecutwfc=> 35)

set_data1 = Dict(:Ze => Point3D(1.2,3.2,1.2))
set_data2 = Dict(:control => Dict(:test => true))
set_job_data!(df_job,["bands","scf"],:atoms,set_data1)
set_job_data!(df_job,["bands","scf"],:control_blocks,set_data2)
@test df_job.calculations["bands"].control_blocks[:control][:test]
@test df_job.calculations["scf"].control_blocks[:control][:pseudo_dir] == "'./'"
@test df_job.calculations["scf"].atoms[:Ze] == Point3D(1.2,3.2,1.2)

0 comments on commit e08c1ac

Please sign in to comment.