#
# Collective Knowledge (program)
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net
#
cfg={} # Will be updated by CK (meta description of this module)
work={} # Will be updated by CK (temporal data)
ck=None # Will be updated by CK (initialized CK kernel)
# Local settings
sep='***************************************************************************************'
##############################################################################
# Initialize module
def init(i):
"""
Input: {}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
return {'return':0}
##############################################################################
# compile program
def process(i):
"""
Input: {
sub_action - clean, compile, run
(repo_uoa) - program repo UOA
(module_uoa) - program module UOA
data_uoa - program data UOA
(host_os) - host OS (detect, if omitted)
(target_os) - OS module to check (if omitted, analyze host)
(device_id) - device id if remote (such as adb)
(process_in_tmp) - (default 'yes') - if 'yes', clean, compile and run in the tmp directory
(tmp_dir) - (default 'tmp') - if !='', use this tmp directory to clean, compile and run
(generate_rnd_tmp_dir) - if 'yes', generate random tmp directory
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
Output of the last compile from function 'process_in_dir'
tmp_dir - directory where clean, compile, run
}
"""
import os
import copy
ic=copy.deepcopy(i)
# Check if global writing is allowed
r=ck.check_writing({})
if r['return']>0: return r
o=i.get('out','')
a=i.get('repo_uoa','')
m=i.get('module_uoa','')
duoa=i.get('data_uoa','')
lst=[]
if duoa=='':
# First, try to detect CID in current directory
r=ck.cid({})
if r['return']==0:
xruoa=r.get('repo_uoa','')
xmuoa=r.get('module_uoa','')
xduoa=r.get('data_uoa','')
rx=ck.access({'action':'load',
'module_uoa':xmuoa,
'data_uoa':xduoa,
'repo_uoa':xruoa})
if rx['return']==0 and rx['dict'].get('program','')=='yes':
duoa=xduoa
m=xmuoa
a=xruoa
if duoa=='':
# Attempt to load configuration from the current directory
try:
p=os.getcwd()
except OSError:
os.chdir('..')
p=os.getcwd()
pc=os.path.join(p, ck.cfg['subdir_ck_ext'], ck.cfg['file_meta'])
if os.path.isfile(pc):
r=ck.load_json_file({'json_file':pc})
if r['return']==0 and r['dict'].get('program','')=='yes':
d=r['dict']
ii=copy.deepcopy(ic)
ii['path']=p
ii['meta']=d
return process_in_dir(ii)
return {'return':1, 'error':'data UOA is not defined'}
# Check wildcards
r=ck.list_data({'repo_uoa':a, 'module_uoa':m, 'data_uoa':duoa})
if r['return']>0: return r
lst=r['lst']
if len(lst)==0:
return {'return':1, 'error':'no program(s) found'}
r={'return':0}
for ll in lst:
p=ll['path']
ruid=ll['repo_uid']
muid=ll['module_uid']
duid=ll['data_uid']
dalias=ll['data_uoa']
r=ck.access({'action':'load',
'repo_uoa':ruid,
'module_uoa':muid,
'data_uoa':duid})
if r['return']>0: return r
d=r['dict']
if o=='con':
ck.out('')
ck.out('* '+dalias+' ('+duid+')')
ck.out('')
ii=copy.deepcopy(ic)
ii['meta']=d
# Check if base_uoa suggests to use another program path
buoa=d.get('base_uoa','')
if buoa!='':
rx=ck.access({'action':'find',
'module_uoa':muid,
'data_uoa':buoa})
if rx['return']>0:
return {'return':1, 'error':'problem finding base entry '+buoa+' ('+rx['error']+')'}
p=rx['path']
ii['path']=p
ii['repo_uoa']=ruid
ii['module_uoa']=muid
ii['data_uoa']=duid
ii['data_alias']=dalias
r=process_in_dir(ii)
if r['return']>0: return r
return r
##############################################################################
# compile, run and clean a given CK program (called from universal functions here)
def process_in_dir(i):
"""
Input: {
Comes from 'compile', 'run' and 'clean' functions
sub_action - clean, compile, run
(host_os) - host OS (detect, if omitted)
(target_os) - OS module to check (if omitted, analyze host)
(device_id) - device id if remote (such as adb)
(target) - target machine added via 'ck add machine' with prepared target description
(useful to create farms of machines for crowd-benchmarking and crowd-tuning using CK)
(device_cfg) - extra device cfg (if empty, will be filled in from 'machine' module description)
(compute_platform_id) - if !='', set env['CK_COMPUTE_PLATFORM_ID']
(compute_device_id) - if !='', set env['CK_COMPUTE_DEVICE_ID']
path - path
meta - program description
(generate_rnd_tmp_dir) - if 'yes', generate random tmp directory to compile and run program
(useful during crowd-tuning)
(compiler_vars) - dict with set up compiler flags (-D var)
they will update the ones defined as default in program description ...
(no_vars) - skip compiler vars (if you want to use default ones from the sources) ...
(compiler_tags) - extra compiler tags
(remove_compiler_vars) - list of compiler vars to remove
(extra_env_for_compilation) - set environment variables before compiling program
(flags) - compile flags
(lflags) - link flags
(speed) - if 'yes', compile for speed (use env CK_OPT_SPEED from compiler)
(size) - if 'yes', compile for size (use env CK_OPT_SIZE from compiler)
(compile_type) - static or dynamic (dynamic by default;
however takes compiler default_compile_type into account)
or
(static or dynamic)
(repeat) - repeat kernel via environment CT_REPEAT_MAIN if supported
(sudo) - if 'yes', force using sudo
(if not set up in OS, use ${CK_SUDO_INIT}, ${CK_SUDO_PRE}, ${CK_SUDO_POST})
(affinity) - set processor affinity for this program run (if supported by OS - see "affinity" in OS)
examples: 0 ; 0,1 ; 0-3 ; 4-7 (the last two can be useful for ARM big.LITTLE architecture)
(clean) - if 'yes', clean tmp directory before using
(skip_clean_after) - if 'yes', do not remove run batch
(keep) - the same as skip_clean_after
(repo_uoa) - program repo UOA
(module_uoa) - program module UOA
(data_uoa) - program data UOA
(params) - dictionary with parameters passed via pre/post processing to third-party tools
for example, to configure ARM Workload Automation
(params.{KEY}) - set params[KEY]=value (user-friendly interface via CMD)
(misc) - misc dict
(characteristics) - characteristics/features/properties
(env) - preset environment
(env.{KEY}) - set env[KEY]=value (user-friendly interface via CMD)
(deps.{KEY}) - set deps[KEY]['uoa']=value (user-friendly interface via CMD to set any given dependency)
(preset_deps) - dict with {"KEY":"UOA"} to preset dependencies
(post_process_script_uoa) - run script from this UOA
(post_process_subscript) - subscript name
(post_process_params) - (string) add params to CMD
(deps) - already resolved deps (useful for auto-tuning)
(deps_cache) - list of already resolved deps (useful to automate crowd-benchmarking and crowd-tuning)
(reuse_deps) - if 'yes', reuse deps by keys
(cmd_key) - CMD key
(dataset_uoa) - UOA of a dataset
(dataset_file) - dataset filename (if an entry contains more than one file, it is suggested to include a UID in the name)
(extra_env) - extra environment before running code as string
(pre_run_cmd) - pre CMD for binary
(extra_run_cmd) - extra CMD (can use $#key#$ for autotuning)
(debug_run_cmd) - substitute CMD with this one - usually useful for debugging to pre-set env for all deps
(run_cmd_substitutes) - dict with substs ($#key#$=value) in run CMD (useful for CMD autotuning)
(console) - if 'yes', output to console
(skip_device_init) - if 'yes', do not initialize device
(skip_calibration) - if 'yes', skip execution time calibration (make it around 4.0 sec)
(calibration_time) - calibration time in string, 4.0 sec. by default
(calibration_max) - max number of iterations for calibration, 10 by default
(pull_only_timer_files) - if 'yes', pull only timer files, but not output files
(useful for remote devices during statistical repetition)
(energy) - if 'yes', start energy monitoring (if supported) using script ck-set-power-sensors
Also, set compiler var CK_MONITOR_ENERGY=1 and run-time var CK_MONITOR_ENERGY=1
Note: files monitored for energy are defined in the system environment.
For example, in the odroid .profile:
export CK_ENERGY_FILES="/sys/bus/i2c/drivers/INA231/3-0040/sensor_W;/sys/bus/i2c/drivers/INA231/3-0041/sensor_W;/sys/bus/i2c/drivers/INA231/3-0044/sensor_W;/sys/bus/i2c/drivers/INA231/3-0045/sensor_W;"
(run_output_files) - extra list of output files (useful to add in pipeline to collect profiling from Android mobile, for example)
(extra_post_process_cmd) - append at the end of execution bat (for example, to call gprof ...)
(statistical_repetition_number) - int number of current (outside) statistical repetition
to avoid pushing data to remote device if !=0 ...
(autotuning_iteration) - int number of current autotuning iteration
to avoid pushing some data to remote device if !=0 ...
(skip_dataset_copy) - if 'yes', dataset stays the same across iterations of pipeline, so do not copy to remote again
(unparsed) - if executing ck run program ... -- (unparsed params), add them to compile or run ...
(compile_timeout) - (sec.) - kill compile job if too long
(run_timeout) - (sec.) - kill run job if too long
(add_rnd_extension_to_bin) - if 'yes', add random extension to binary and record list
(add_save_extension_to_bin) - if 'yes', add '.save' to bin to save during cleaning ...
(skip_print_timers) - if 'yes', skip printing fine-grain timers after execution
(skip_file_print) - skip file printing (if 'print_files_after_run' list is in program meta)
(skip_output_validation) - skip validation of output (dangerous during auto-tuning -
some optimizations may break semantics or change accuracy)
(output_validation_repo) - output validation repo UOA (when recording new output)
(program_output_uoa) - use this UOA to check/record program output
(to have the same output entry for groups of similar programs)
(overwrite_reference_output) - if 'yes', overwrite reference output (useful if broken)
(quiet) - if 'yes', automatically provide default answer to all questions when resolving dependencies ...
(random) - if 'yes', select deps randomly (useful for quiet crowd-tuning / DNN classification)
(install_to_env) - install dependencies to env instead of CK-TOOLS (to keep it clean)!
(safe) - safe mode when searching packages first instead of detecting already installed soft
(to have more deterministic build)
(skip_exec) - if 'yes', do not clean output files and skip exec to be able to continue
post-processing during debugging
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
misc - updated misc dict
characteristics - updated characteristics
env - updated environment
deps - resolved deps, if any
}
"""
import os
import time
import sys
import shutil
import copy
start_time=time.time()
sys.stdout.flush()
o=i.get('out','')
oo=''
if o=='con': oo='con'
sa=i['sub_action']
sdi=i.get('skip_device_init','')
sca=i.get('skip_clean_after','')
if sca=='':
sca=i.get('keep','')
grtd=i.get('generate_rnd_tmp_dir','')
quiet=i.get('quiet','')
ran=i.get('random','')
iev=i.get('install_to_env','')
safe=i.get('safe','')
skip_exec=i.get('skip_exec','')
misc=i.get('misc',{})
ccc=i.get('characteristics',{})
env=i.get('env',{})
xparams=i.get('params',{})
deps=i.get('deps',{})
reuse_deps=i.get('reuse_deps','')
deps_cache=i.get('deps_cache',[])
# Check user-friendly env and params
preset_deps=i.get('preset_deps', {})
for q in i:
if q.startswith('env.'):
env[q[4:]]=i[q]
elif q.startswith('params.'):
xparams[q[7:]]=i[q]
elif q.startswith('deps.'):
preset_deps[q[5:]]=i[q]
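# For example (hypothetical values), the command-line flags
#   --env.OMP_NUM_THREADS=4 --params.iterations=10 --deps.compiler=<env UOA>
# arrive here as i['env.OMP_NUM_THREADS'], i['params.iterations'] and i['deps.compiler']
# and are unpacked into env, xparams and preset_deps respectively.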
are=i.get('add_rnd_extension_to_bin','')
ase=i.get('add_save_extension_to_bin','')
rof=i.get('run_output_files',[])
eppc=i.get('extra_post_process_cmd','')
unparsed=i.get('unparsed', [])
sunparsed=''
for q in unparsed:
if sunparsed!='': sunparsed+=' '
sunparsed+=q
sfp=i.get('skip_file_print','')
ee=i.get('extra_env','')
ercmd=i.get('extra_run_cmd','')
drcmd=i.get('debug_run_cmd','')
prcmd=i.get('pre_run_cmd','')
rcsub=i.get('run_cmd_substitutes','')
cons=i.get('console','')
flags=i.get('flags','')
lflags=i.get('lflags','')
cv=i.get('compiler_vars',{})
ncv=i.get('no_vars',{})
ctags=i.get('compiler_tags','')
rcv=i.get('remove_compiler_vars',[])
eefc=i.get('extra_env_for_compilation',{})
fspeed=i.get('speed','')
fsize=i.get('size','')
xrepeat=i.get('repeat','')
if xrepeat=='': xrepeat='-1'
repeat=int(xrepeat)
me=i.get('energy','')
xcto=i.get('compile_timeout','')
xrto=i.get('run_timeout','')
pp_uoa=i.get('post_process_script_uoa','')
pp_name=i.get('post_process_subscript','')
pp_params=i.get('post_process_params','')
# Check if need to initialize device and directly update input i !
r=ck.access({'action':'find',
'module_uoa':cfg['module_deps']['module'],
'data_uoa':cfg['module_deps']['machine']})
if r['return']==0:
ii={'action':'init',
'module_uoa':cfg['module_deps']['machine'],
'input':i}
if sa=='run':
ii['check']='yes'
r=ck.access(ii)
if r['return']>0: return r
device_cfg=i.get('device_cfg',{})
# Check host/target OS/CPU
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('device_id','')
# Get some info about platforms
ii={'action':'detect',
'module_uoa':cfg['module_deps']['platform.os'],
'host_os':hos,
'target_os':tos,
'device_cfg':device_cfg,
'device_id':tdid,
'skip_device_init':sdi}
if sa=='run':
x='no'
if i.get('skip_info_collection','')!='': x=i['skip_info_collection']
ii['skip_info_collection']=x
ii['out']=o
else:
ii['skip_info_collection']='yes'
r=ck.access(ii)
if r['return']>0: return r
hos=r['host_os_uid']
hosx=r['host_os_uoa']
hosd=r['host_os_dict']
tos=r['os_uid']
tosx=r['os_uoa']
tosd=r['os_dict']
tplat=tosd.get('ck_name','')
tplat2=tosd.get('ck_name2','')
host_add_path_string=r.get('host_add_path_string','')
target_add_path_string=r.get('target_add_path_string','')
if r['device_id']!='': tdid=r['device_id']
xtdid=''
if tdid!='': xtdid=' -s '+tdid
remote=tosd.get('remote','')
remote_ssh=tosd.get('remote_ssh','')
tbits=tosd.get('bits','')
# Update env for host from host/target OS desc if needed (for example for RPC)
x=hosd.get('preset_host_env',{})
if len(x)>0:
env.update(x)
x=tosd.get('preset_host_env',{})
if len(x)>0:
env.update(x)
# Add path to CK target entry if used (to get machine specific files if needed)
x=device_cfg.get('path_to_ck_target_entry','')
if x!='':
env['CK_TARGET_PATH']=x
# update misc
misc['host_os_uoa']=hosx
misc['target_os_uoa']=tosx
misc['target_os_bits']=tbits
misc['device_id']=tdid
# Check compile type
ctype=i.get('compile_type','')
if i.get('static','')=='yes': ctype='static'
if i.get('dynamic','')=='yes': ctype='dynamic'
# On default Android-32, use static compilation by default
# (old platforms have problems with dynamic linking)
if ctype=='':
if tosd.get('default_compile_type','')!='':
ctype=tosd['default_compile_type']
else:
ctype='dynamic'
# Get host platform type (linux or win)
rx=ck.get_os_ck({})
if rx['return']>0: return rx
hplat=rx['platform']
bbp=hosd.get('batch_bash_prefix','')
bbpt=tosd.get('batch_bash_prefix','')
rem=hosd.get('rem','')
eset=hosd.get('env_set','')
etset=tosd.get('env_set','')
svarb=hosd.get('env_var_start','')
svarb1=hosd.get('env_var_extra1','')
svare=hosd.get('env_var_stop','')
svare1=hosd.get('env_var_extra2','')
scall=hosd.get('env_call','')
sdirs=hosd.get('dir_sep','')
sdirsx=tosd.get('remote_dir_sep','')
if sdirsx=='': sdirsx=sdirs
stdirs=tosd.get('dir_sep','')
sext=hosd.get('script_ext','')
sexe=hosd.get('set_executable','')
se=tosd.get('file_extensions',{}).get('exe','')
sbp=hosd.get('bin_prefix','')
stbp=tosd.get('bin_prefix','')
sqie=hosd.get('quit_if_error','')
evs=hosd.get('env_var_separator','')
envsep=hosd.get('env_separator','')
envtsep=tosd.get('env_separator','')
eifs=hosd.get('env_quotes_if_space','')
eifsc=hosd.get('env_quotes_if_space_in_call','')
eifsx=tosd.get('remote_env_quotes_if_space','')
if eifsx=='': eifsx=eifsc
wb=tosd.get('windows_base','')
stro=tosd.get('redirect_stdout','')
stre=tosd.get('redirect_stderr','')
ubtr=hosd.get('use_bash_to_run','')
no=tosd.get('no_output','')
bex=hosd.get('batch_exit','')
md5sum=hosd.get('md5sum','')
# env for targets
tsvarb=tosd.get('env_var_start','')
tsvarb1=tosd.get('env_var_extra1','')
tsvare=tosd.get('env_var_stop','')
tsvare1=tosd.get('env_var_extra2','')
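# Illustrative values of the OS descriptors used above (assumed typical platform.os
# entries, not a verbatim copy):
#   Linux host:   env_set='export', script_ext='.sh',  dir_sep='/',  redirect_stdout='>'
#   Windows host: env_set='set',    script_ext='.bat', dir_sep='\\', redirect_stdout='>'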
########################################################################
p=i['path']
meta=i['meta']
ruoa=i.get('repo_uoa', '')
muoa=i.get('module_uoa', '')
duoa=i.get('data_uoa', '') # There will be data UID (not alias) from 'process' function from this module!
dalias=i.get('data_alias','')
########################################################################
# Check if correct target OS
r=ck.access({'action':'check_target',
'module_uoa':cfg['module_deps']['soft'],
'dict':meta,
'host_os_uoa':hos,
'host_os_dict':hosd,
'target_os_uoa':tos,
'target_os_dict':tosd})
if r['return']>0: return r
# Check if need specific device access type
dat=meta.get('required_device_access_type',[])
if len(dat)>0 and device_cfg.get('access_type','') not in dat:
return {'return':1, 'error':'This program can not be used with the specified device target (need '+str(dat)+')'}
target_exe=meta.get('target_file','')
if target_exe=='' and meta.get('no_target_file','')!='yes':
target_exe=cfg.get('target_file','')
if are=='yes' and target_exe!='':
rx=ck.gen_uid({})
if rx['return']>0: return rx
target_exe+='-'+rx['data_uid']
if (meta.get('skip_bin_ext','')!='yes' or tplat=='win') and target_exe!='':
target_exe+=se
if ase=='yes' and target_exe!='':
target_exe+='.save'
if target_exe!='':
misc['target_exe']=target_exe
if meta.get('version','')!='':
misc['program_version']=meta['version']
# If muoa=='' assume program
if muoa=='':
muoa=work['self_module_uid']
if duoa=='':
x=meta.get('backup_data_uid','')
if x!='':
duoa=meta['backup_data_uid']
# Reuse compile deps in run (useful for large benchmarks such as SPEC where compile and run are merged)
rcd=meta.get('reuse_compile_deps_in_run','')
# Check if compile in tmp dir
cdir=p
os.chdir(cdir)
# Prepare params (if needed - for example, for ARM Workload Automation)
params=meta.get('default_params',{})
r=ck.merge_dicts({'dict1':params, 'dict2':xparams})
if r['return']>0: return r
########################################################################
# Check if need to add paths to CK entries as env
qq=meta.get('ck_to_env',{})
for q in qq:
qc=qq[q]
r=ck.access({'action':'find',
'cid':qc})
if r['return']>0: return r
env[q]=r['path']
########################################################################
# Check affinity
aff=i.get('affinity','')
if aff!='':
aff=tosd.get('set_affinity','').replace('$#ck_affinity#$',aff)
########################################################################
# Check sudo
sudo_init=tosd.get('sudo_init','')
if sudo_init=='': sudo_init=svarb+svarb1+'CK_SUDO_INIT'+svare1+svare
sudo_pre=tosd.get('sudo_pre','')
if sudo_pre=='': sudo_pre=svarb+svarb1+'CK_SUDO_PRE'+svare1+svare
# sudo_post=tosd.get('sudo_post','')
# if sudo_post=='':
sudo_post=svarb+svarb1+'CK_SUDO_POST'+svare1+svare
isd=i.get('sudo','')
if isd=='': isd=tosd.get('force_sudo','')
srn=ck.get_from_dicts(i, 'statistical_repetition_number', '', None)
if srn=='': srn=0
else: srn=int(srn)
ati=ck.get_from_dicts(i, 'autotuning_iteration', '', None)
if ati=='': ati=0
else: ati=int(ati)
sdc=ck.get_from_dicts(i, 'skip_dataset_copy', '', None)
##################################################################################################################
################################### Clean ######################################
if sa=='clean':
# Get host platform type (linux or win)
cmd=cfg.get('clean_cmds',{}).get(hplat)
if o=='con':
ck.out(cmd)
ck.out('')
if ubtr!='': cmd=ubtr.replace('$#cmd#$',cmd)
rx=os.system(cmd)
# Remove only the default 'tmp' directory. If there are multiple tmp directories (possibly used for crowd-tuning), do not delete them
try:
curdir=os.getcwd()
except OSError:
os.chdir('..')
curdir=os.getcwd()
q=os.path.join(curdir, 'tmp')
if os.path.isdir(q):
shutil.rmtree(q, ignore_errors=True)
# for q in os.listdir(curdir):
# if not os.path.isfile(q) and q.startswith('tmp'):
# shutil.rmtree(q, ignore_errors=True)
return {'return':0}
# shall we process_in_tmp or not?
#
process_in_tmp = i.get('process_in_tmp', meta.get('process_in_tmp', 'yes') ).lower() == 'yes'
td=''
if process_in_tmp:
tdx=i.get('tmp_dir','')
td=tdx
if td=='': td='tmp'
if i.get('clean','')=='yes':
if td!='' and os.path.isdir(td):
# cxx1=os.getcwd()
# os.chdir(os.path.join(p,td))
# cmd=cfg.get('clean_cmds',{}).get(hplat)
# if o=='con':
# ck.out(cmd)
# ck.out('')
# if ubtr!='': cmd=ubtr.replace('$#cmd#$',cmd)
# rx=os.system(cmd)
# os.chdir(cxx1)
shutil.rmtree(td, ignore_errors=True)
if tdx=='' and grtd=='yes':
# Generate tmp dir
import tempfile
fd, fn=tempfile.mkstemp(suffix='', prefix='tmp-ck-')
os.close(fd)
os.remove(fn)
td=os.path.basename(fn)
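# Note: tempfile.mkstemp is used here only to obtain a unique name - the temporary
# file itself is closed and removed immediately, and its basename (prefixed 'tmp-ck-')
# becomes the name of the per-run tmp sub-directory created below.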
cdir=os.path.join(p, td)
misc['tmp_dir']=td
misc['path']=p
if cdir!='' and not os.path.isdir(cdir):
time.sleep(1)
try:
os.makedirs(cdir)
except Exception as e:
pass
if not os.path.isdir(cdir):
return {'return':1, 'error':'can\'t create tmp directory ('+cdir+')'}
sb='' # Batch
# If extra paths
if host_add_path_string!='' and (remote!='yes' or sa!='run'):
sb+=host_add_path_string+'\n\n'
if sa=='run' and target_add_path_string!='':
sb+=target_add_path_string+'\n\n'
if o=='con':
ck.out(sep)
ck.out('Current directory: '+cdir)
try:
odir=os.getcwd()
except OSError:
os.chdir('..')
odir=os.getcwd()
os.chdir(cdir)
try:
rcdir=os.getcwd()
except OSError:
os.chdir('..')
rcdir=os.getcwd()
# If run and dynamic or reuse compile deps, check deps prepared by compiler
fdeps=cfg.get('deps_file','')
if meta.get('skip_tmp_deps','')!='yes' and len(deps)==0 and sa=='run' and (rcd=='yes' or ctype=='dynamic'):
if os.path.isfile(fdeps):
if o=='con':
ck.out('')
ck.out('Reloading dependencies from compilation '+fdeps+' ...')
rx=ck.load_json_file({'json_file':fdeps})
if rx['return']>0: return rx
deps=rx['dict']
# If compile type is dynamic, reuse deps even for run (to find specific DLLs)
# (REMOTE PLATFORMS ARE NOT SUPPORTED AT THE MOMENT, USE STATIC COMPILATION)
# if (ctype=='dynamic' or sa=='compile' or rcd=='yes'):
# Resolve deps (unless should be explicitly ignored, such as when installing local version with all dependencies set)
if len(deps)==0:
deps=meta.get('compile_deps',{})
if len(deps)==0:
deps=meta.get('deps',{})
if remote=='yes' and sa=='run' and 'android' in tosd.get('tags',[]) and 'adb' not in deps:
deps['adb']={
"force_target_as_host": "yes",
"local": "yes",
"name": "adb tool",
"sort": -10,
"tags": "tool,adb"
}
if len(deps)>0:
if o=='con':
ck.out(sep)
# Add extra compiler flags
if ctags!='' and 'compiler' in deps:
xctags=deps['compiler'].get('tags','')
if xctags!='':
xctags+=','
xctags+=ctags
deps['compiler']['tags']=xctags
# Check user-friendly deps
for q in preset_deps:
if q in deps:
deps[q]['uoa']=preset_deps[q]
ii={'action':'resolve',
'module_uoa':cfg['module_deps']['env'],
'host_os':hos,
'target_os':tos,
'device_id':tdid,
'deps':deps,
'deps_cache':deps_cache,
'reuse_deps':reuse_deps,
'add_customize':'yes',
'random':ran,
'quiet':quiet,
'install_to_env':iev,
'safe':safe}
if o=='con': ii['out']='con'
rx=ck.access(ii)
if rx['return']>0: return rx
if sa=='compile' or remote!='yes':
sb+=no+rx['bat']
deps=rx['deps'] # Update deps (add UOA)
if sa=='compile':
rx=ck.save_json_to_file({'json_file':fdeps, 'dict':deps})
if rx['return']>0: return rx
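# At this point rx['bat'] is a shell/batch fragment that sets up the environment of
# every resolved dependency (it was appended to the generated script above), and deps
# is updated with the concrete env entry UOAs. For 'compile', the resolved deps are
# also cached in the file named by cfg['deps_file'] so that a later 'run' can reload
# them without resolving again.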
# If compiler, load env
comp=deps.get('compiler',{})
comp_uoa=comp.get('uoa','')
dcomp={}
if comp_uoa!='':
rx=ck.access({'action':'load',
'module_uoa':cfg['module_deps']['env'],
'data_uoa':comp_uoa})
if rx['return']>0: return rx
dcomp=rx['dict']
# Add energy monitor, if needed and if supported
sspm1=tosd.get('script_start_power_monitor','')
sspm2=tosd.get('script_stop_power_monitor','')
if me=='yes' and sspm1!='':
if o=='con':
ck.out('')
ck.out('Adding energy monitor')
ck.out('')
sb+='\n'
sb+=scall+' '+sspm1+'\n'
sb+='\n'
##################################################################################################################
################################### Compile ######################################
if sa=='compile':
# Clean target file
if target_exe!='' and os.path.isfile(target_exe):
os.remove(target_exe)
if sa=='compile' or sa=='get_compiler_version':
# Check if pre-process script
x=meta.get('use_preprocess_compilation_scripts',{})
if len(x)>0:
xam=x.get('module_uoa','')
if xam=='': xam=work['self_module_uid']
xad=x.get('data_uoa','')
r=ck.access({'action':'find',
'module_uoa':xam,
'data_uoa':xad})
if r['return']>0: return r
ppp1=r['path']
# Check if has custom script
cs=None
csn=x.get('script_name','')
if csn=='': csn='custom'
rx=ck.load_module_from_path({'path':ppp1, 'module_code_name':csn, 'skip_init':'yes'})
if rx['return']>0: return rx
cs=rx['code']
csf=x.get('script_func','')
if csf=='': csf='setup'
if csf not in dir(cs):
return {'return':1, 'error':'function '+csf+' not found in script '+csn+' in path '+ppp1}
# Call customized script
ii={"host_os_uoa":hosx,
"host_os_uid":hos,
"host_os_dict":hosd,
"target_os_uoa":tosx,
"target_os_uid":tos,
"target_os_dict":tosd,
"target_device_id":tdid,
"meta":meta,
"env":env,
"deps":deps,
"self_cfg":cfg,
"ck_kernel":ck
}
if o=='con': ii['interactive']='yes'
if i.get('quiet','')=='yes': ii['interactive']=''
script_func=getattr(cs, csf)
rx=script_func(ii)
if rx['return']>0: return rx
# Update install env from customized script (if needed)
new_env=rx.get('install_env',{})
if len(new_env)>0:
env.update(new_env)
# Process compile vars
compile_vars=meta.get('compile_vars',{})
for q in compile_vars:
if q not in env:
x=compile_vars[q]
try:
x=x.replace('$#src_path#$', src_path)
except Exception as e: # need to detect if not string (not to crash)
pass
env[q]=x
# Update env from deps
# for kkd in sorted(deps, key=lambda kk: deps[kk].get('sort',0)):
# for kkd1 in deps[kkd].get('dict',{}).get('env',{}):
# if kkd1 not in env:
# env[kkd1]=deps[kkd]['dict']['env'][kkd1]
# Add compiler dep again, if there
cb=deps.get('compiler',{}).get('bat','')
if cb!='' and not sb.endswith(cb):
sb+='\n'+no+cb.strip()+' 1\n' # We set 1 to tell environment that it should set again even if it was set before
# for kkd1 in deps['compiler'].get('dict',{}).get('env',{}):
# if kkd1 not in env:
# env[kkd1]=deps['compiler']['dict']['env'][kkd1]
# Add other deps at the end if needed
for q in deps:
x=deps[q]
if x.get('add_to_the_end_of_bat','')=='yes' and x.get('bat','')!='':
y=x['bat']
if not sb.endswith(y):
sb+='\n'+no+y.strip()+' 1\n' # We set 1 to tell environment that it should set again even if it was set before
# Add env
for k in sorted(env):
v=str(env[k])
v=v.replace('$<<',svarb).replace('>>$',svare)
if eifs!='' and wb!='yes':
if v.find(' ')>=0 and not v.startswith(eifs):
v=eifs+v+eifs
sb+=no+eset+' '+k+'='+str(v)+'\n'
sb+='\n'
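# For example (assuming a Linux host where eset='export' and eifs='"'), the entry
# env={'CK_DATASET_PATH':'/home/user/my data/'} is emitted into the batch file as
#   export CK_DATASET_PATH="/home/user/my data/"
# while on a Windows host the same entry would start with 'set' instead.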
# Try to detect version
csd=deps.get('compiler',{}).get('dict',{})
csuoa=csd.get('soft_uoa','')
fp=csd.get('customize',{}).get('full_path','')
uloc=csd.get('customize',{}).get('use_locale_for_version','')
cver=''
if csuoa!='':
r=ck.access({'action':'internal_detect',
'module_uoa':cfg['module_deps']['soft'],
'tool':fp,
'uoa':csuoa,
'env':cb,
'use_locale':uloc,
'con':o})
if r['return']==0:
cver=r['version_str']
misc['compiler_detected_ver_list']=r['version_lst']
misc['compiler_detected_ver_str']=cver
misc['compiler_detected_ver_raw']=r['version_raw']
if o=='con':
ck.out(sep)
ck.out('Detected compiler version: '+cver)
ck.out('')
if sa=='compile':
# Check linking libs + include paths for all deps
sll=''
sin=''
for k in sorted(deps, key=lambda kk: deps[kk].get('sort',0)):
depsk=deps[k]
kv=depsk.get('cus',{})
# Process include
pl3l=kv.get('path_include','')
pl3ll=kv.get('path_includes',[])
if pl3l not in pl3ll:
pl3ll.append(pl3l)
for pl3 in pl3ll:
if pl3!='':
if pl3.endswith('\\'): pl3=pl3[:-1] # otherwise can be problems on Windows ...
if sin!='': sin+=' '
sin+=svarb+svarb1+'CK_FLAG_PREFIX_INCLUDE'+svare1+svare+eifsc+pl3+eifsc
# Process lib (if not skipped)
if depsk.get('skip_linking','')=='yes':
continue
pl1=kv.get('path_lib','')
if pl1=='': pl1=kv.get('path_static_lib','')
pl1d=kv.get('path_dynamic_lib','')
if pl1d=='': pl1d=pl1
# Check if extra
extra_libs=depsk.get('extra_libs',[])
els=[]
cus_extra_libs=kv.get('extra_static_libs',{})
if len(cus_extra_libs)==0: cus_extra_libs=kv.get('extra_dynamic_libs',{})
for el in extra_libs:
x=cus_extra_libs.get(el,'')
if x=='':
return {'return':1, 'error':'library '+el+' is not defined in dependencies'}
els.append(x)
x=kv.get('static_lib','')
if x=='' and ctype=='dynamic' and kv.get('dynamic_lib','')!='': x=kv['dynamic_lib']
els.append(x)
# Check if force to add library path (-L)
path_added=False
if tplat!='win' and depsk.get('force_add_static_lib_path','')=='yes':
sll+=' '+svarb+svarb1+'CK_FLAG_PREFIX_LIB_DIR'+svare1+svare+eifsc+pl1d+eifsc
path_added=True
for pl2 in els:
if pl2!='':
if sll!='': sll+=' '
if ctype=='dynamic' and wb!='yes' and (remote=='yes' or pl1d!='') and csd.get('customize',{}).get('can_strip_dynamic_lib','')=='yes':
pl2x=os.path.splitext(pl2)[0]
if pl2x.startswith('lib'): pl2x=pl2x[3:]
if not path_added:
if pl1d.endswith('\\'): pl1d=pl1d[:-1] # otherwise can be problems on Windows ...
sll+=' '+svarb+svarb1+'CK_FLAG_PREFIX_LIB_DIR'+svare1+svare+eifsc+pl1d+eifsc
path_added=True
sll+=' -l'+pl2x
else:
sll+=eifsc
if pl1!='':
sll+=pl1+sdirs
sll+=pl2
sll+=eifsc
evr=depsk.get('extra_ld_vars','')
if evr!='':
evr=evr.replace('$<<',svarb).replace('>>$',svare)
sll+=' '+evr
# Check if local includes
linc=meta.get('include_dirs',[])
if len(linc)>0:
for q in linc:
# Check if source from another entry (species)
full_path=''
if q.startswith('$#ck_take_from_{'):
r9=substitute_some_ck_keys({'string':q})
if r9['return']>0: return r9
x=r9['string']
else:
if td!='': full_path='..'+sdirs
else: full_path=''
x=os.path.join(full_path,q)
if x.endswith('\\'): x=x[:-1] # otherwise can be problems on Windows ...
if sin!='': sin+=' '
sin+=svarb+svarb1+'CK_FLAG_PREFIX_INCLUDE'+svare1+svare+eifsc+x+eifsc
# Check if includes are passed as environment variables (we search in env settings,
# not in the real environment; otherwise we could end up concatenating -I with an empty string)
line=meta.get('compiler_add_include_as_env_from_deps',[])
xline=[]
for qq in line:
if qq.find('$<<')<0 and qq.find('>>$')<0:
qq='$<<'+qq+'>>$'
jq1=qq.find('$<<')
while jq1>=0:
jq2=qq.find('>>$')
if jq2>0:
q=qq[jq1+3:jq2]
qx=''
for g1 in deps:
gg=deps[g1]
gge=gg.get('dict',{}).get('env',{})
xgge=gge.get(q,'')
if xgge!='':
qx=xgge
break
qq=qq[:jq1]+qx+qq[jq2+3:]
jq1=qq.find('$<<')
else:
return {'return':1, 'error':'inconsistency in "compiler_add_include_as_env_from_deps" key in program meta'}
xline.append(qq)
for xgge in xline:
if xgge!='':
if xgge.endswith('\\'): xgge=xgge[:-1] # otherwise can be problems on Windows ...
if sin!='': sin+=' '
sin+=svarb+svarb1+'CK_FLAG_PREFIX_INCLUDE'+svare1+svare+eifsc+xgge+eifsc
# Obtaining compile CMD (first from program entry, then default from this module)
ccmds=meta.get('compile_cmds',{})
ccmd=ccmds.get(hplat,{})
if len(ccmd)==0:
ccmd=ccmds.get('default',{})
if len(ccmd)==0:
ccmds=cfg.get('compile_cmds',{})
ccmd=ccmds.get(hplat,{})
if len(ccmd)==0:
ccmd=ccmds.get('default',{})
sccmd=ccmd.get('cmd','')
if sccmd=='':
return {'return':1, 'error':'compile CMD is not found'}
sccmd=sccmd.replace('$#script_ext#$',sext)
sccmd=sccmd.replace('$#dir_sep#$',stdirs)
sccmd=sccmd.replace('$<<',svarb).replace('>>$',svare)
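# The compile command template typically has the following shape (illustrative,
# matching the placeholders substituted below rather than a verbatim meta entry):
#   "cmd": "$#compiler#$ $#flags_before#$ $#source_file#$ $#flags_after#$"
# where $#compiler#$ expands to the compiler env var (CK_CC by default) and the
# flags_before/flags_after parts are assembled from CK_FLAGS_*, CK_OPT_*, -I and -D
# prefixes prepared above.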
# Source files
sfs=meta.get('source_files',[])
compiler_env=''
if hplat=='win':
compiler_env=meta.get('compiler_env_win','')
if compiler_env=='':
compiler_env=meta.get('compiler_env','')
if compiler_env=='':
compiler_env='CK_CC'
sfprefix='..'+sdirs
scfb=''
flags_def=''
if fspeed=='yes':
scfb+=' '+svarb+'CK_OPT_SPEED'+svare+' '
flags_def+=' '+svarb+'CK_OPT_SPEED'+svare+' '
elif fsize=='yes':
flags_def+=' '+svarb+'CK_OPT_SIZE'+svare+' '
scfb+=svarb+'CK_FLAGS_CREATE_OBJ'+svare
scfb+=' '+svarb+'CK_COMPILER_FLAGS_OBLIGATORY'+svare
if ctype=='dynamic':
scfb+=' '+svarb+'CK_FLAGS_DYNAMIC_BIN'+svare
elif ctype=='static':
scfb+=' '+svarb+'CK_FLAGS_STATIC_BIN'+svare
if meta.get('skip_local_include','')!='yes':
scfb+=' '+svarb+svarb1+'CK_FLAG_PREFIX_INCLUDE'+svare1+svare+sfprefix
scfa=''
# Check build -D flags
sbcv=''
bcv={}
if ncv!='yes':
bcv=meta.get('build_compiler_vars',{})
for q in rcv:
if q in bcv: del(bcv[q])
bcv.update(cv)
bcv['CK_HOST_OS_NAME_'+hosd.get('ck_name','').upper()]='1'
bcv['CK_HOST_OS_NAME2_'+hosd.get('ck_name2','').upper()]='1'
bcv['CK_TARGET_OS_NAME_'+tosd.get('ck_name','').upper()]='1'
bcv['CK_TARGET_OS_NAME2_'+tosd.get('ck_name2','').upper()]='1'
# Update env if energy meter
if me=='yes':
bcv['CK_MONITOR_ENERGY']='1'
if o=='con' and len(bcv)>0:
ck.out(sep)
ck.out('Compiler vars:')
if meta.get('skip_compiler_vars','')!='yes':
for k in sorted(bcv):
kv=bcv[k]
if sbcv!='': sbcv+=' '
sbcv+=svarb+svarb1+'CK_FLAG_PREFIX_VAR'+svare1+svare+k
if kv!='': sbcv+='='+str(kv)
if o=='con':
ck.out(' '+k+'='+str(kv))
# Check if compiler flags as environment variable
cfev=''
if hplat=='win':
cfev=meta.get('compiler_flags_as_env_win','')
if cfev=='':
cfev=meta.get('compiler_flags_as_env','')
if cfev!='':
cfev=cfev.replace('$<<',svarb).replace('>>$',svare)
sbcv+=' '+cfev
# Check if has customization scripts
scus=os.path.join(cdir,'..','customize'+sext)
if os.path.isfile(scus):
sb+='\n'+scall+' '+scus+'\n\n'
# Prepare compilation
sb+='\n'
denv=dcomp.get('env',{})
sobje=denv.get('CK_OBJ_EXT','')
sofs=''
xsofs=[]
if ee!='':
sb+='\n'+no+ee+'\n\n'
if o=='con': ck.out(sep)
# Compilation flags
xcfb=scfb
if sbcv!='': xcfb+=' '+sbcv
if sin!='': xcfb+=' '+sin
xcfb+=' '+flags
# Linking flags
slfb=svarb+'CK_COMPILER_FLAGS_OBLIGATORY'+svare
slfb+=' '+lflags
if ctype=='dynamic':
slfb+=' '+svarb+'CK_FLAGS_DYNAMIC_BIN'+svare
elif ctype=='static':
slfb+=' '+svarb+'CK_FLAGS_STATIC_BIN'+svare
slfa=''
if target_exe!='':
slfa=' '+svarb+svarb1+'CK_FLAGS_OUTPUT'+svare1+svare+target_exe
slfa+=' '+svarb+'CK_LD_FLAGS_MISC'+svare
slfa+=' '+svarb+'CK_LD_FLAGS_EXTRA'+svare
evrf=meta.get('extra_ld_vars_first','')
if evrf!='':
evrf=evrf.replace('$<<',svarb).replace('>>$',svare)
slfa+=' '+evrf
if sll!='': slfa+=' '+sll
evr=''
if hplat=='win':
evr=meta.get('extra_ld_vars_win','')
if evr=='':
evr=meta.get('extra_ld_vars','')
if evr!='':
evr=evr.replace('$<<',svarb).replace('>>$',svare)
slfa+=' '+evr
# Check if libraries are passed as environment variables
llinkle=meta.get('linker_add_lib_as_env',[])
if len(llinkle)>0:
for q in llinkle:
if slfa!='': slfa+=' '
slfa+=svarb+svarb1+q+svare1+svare
# Check if call compile CMD only once with all files
if meta.get('use_compile_script','')=='yes':
cc=sccmd
# Add compiler and linker flags as environment
sb+='\n'
genv={'CK_PROG_COMPILER_FLAGS_BEFORE':xcfb,
'CK_PROG_LINKER_FLAGS_BEFORE':slfb,
'CK_PROG_LINKER_FLAGS_AFTER':slfa,
'CK_PROG_COMPILER_VARS':sbcv,
'CK_PROG_COMPILER_FLAGS':flags_def+' '+flags,
'CK_PROG_LINKER_LIBS':sll,
'CK_PROG_TARGET_EXE':target_exe}
extcomp=meta.get('extra_env_for_compilation',{})
if len(extcomp)>0:
genv.update(extcomp)
if len(eefc)>0:
genv.update(eefc)
for gg in genv:
gx=genv[gg]
if eifs!='': gx=gx.replace(eifs, '\\'+eifs)
sb+=no+eset+' '+gg+'='+eifs+gx+eifs+'\n'
sb+='echo '+eifs+cc+eifs+'\n'
sb+=no+cc+'\n'
sb+=no+sqie+'\n'
sb+='\n'
else:
for sf in sfs:
sf=sf.strip()
xcfa=scfa
# Check if source from another entry (species)
full_path=''
if sf.startswith('$<<'):
full_path=sf.replace('$<<',svarb).replace('>>$',svare)
elif sf.startswith('$#ck_take_from_{'):
b2=sf.find('}#$')
if b2<0:
return {'return':1, 'error':'can\'t parse source file '+sf+' ...'}
bb=sf[16:b2]
rb=ck.access({'action':'load',
'module_uoa':muoa,
'data_uoa':bb})
if rb['return']>0:
return {'return':1, 'error':'can\'t find sub-entry '+bb}
sf=sf[b2+3:]
full_path=os.path.join(rb['path'],sf)
else:
full_path=os.path.join(sfprefix,sf)
sf0,sf1=os.path.splitext(sf)
sf00=os.path.basename(sf)
sf00a,sf00b=os.path.splitext(sf00)
sfobj=sf00a+sobje
if sofs!='': sofs+=' '
sofs+=sfobj
xsofs.append(sfobj)
if 'CK_FLAGS_OUTPUT' in denv:
xcfa+=' '+svarb+svarb1+'CK_FLAGS_OUTPUT'+svare1+svare+sfobj
cc=sccmd
cc=cc.replace('$#source_file#$', full_path)
cc=cc.replace('$#compiler#$', svarb+compiler_env+svare)
cc=cc.replace('$#flags_before#$', xcfb)
cc=cc.replace('$#flags_after#$', xcfa)
if sunparsed!='': cc+=' '+sunparsed
sb+='echo '+eifs+cc+eifs+'\n'
sb+=no+cc+'\n'
sb+=no+sqie+'\n'
sb+='\n'
# Obtaining link CMD (first from program entry, then default from this module)
if sofs!='':
linker_env=meta.get('linker_env','')
if linker_env=='': linker_env=compiler_env
lcmds=meta.get('link_cmds',{})
lcmd=lcmds.get(hplat,{})
if len(lcmd)==0:
lcmd=lcmds.get('default',{})
if len(lcmd)==0:
lcmds=cfg.get('link_cmds',{})
lcmd=lcmds.get(hplat,{})
if len(lcmd)==0:
lcmd=lcmds.get('default',{})
slcmd=lcmd.get('cmd','')
if slcmd!='':
slfb=svarb+'CK_COMPILER_FLAGS_OBLIGATORY'+svare
slfb+=' '+lflags
if ctype=='dynamic':
slfb+=' '+svarb+'CK_FLAGS_DYNAMIC_BIN'+svare
elif ctype=='static':
slfb+=' '+svarb+'CK_FLAGS_STATIC_BIN'+svare
slfa=''
if target_exe!='':
slfa=' '+svarb+svarb1+'CK_FLAGS_OUTPUT'+svare1+svare+target_exe
slfa+=' '+svarb+'CK_LD_FLAGS_MISC'+svare
slfa+=' '+svarb+'CK_LD_FLAGS_EXTRA'+svare
evrf=meta.get('extra_ld_vars_first','')
if evrf!='':
evrf=evrf.replace('$<<',svarb).replace('>>$',svare)
slfa+=' '+evrf
if sll!='': slfa+=' '+sll
evr=''
if hplat=='win':
evr=meta.get('extra_ld_vars_win','')
if evr=='':
evr=meta.get('extra_ld_vars','')
if evr!='':
evr=evr.replace('$<<',svarb).replace('>>$',svare)
slfa+=' '+evr
# Check if libraries are passed as environment variables
llinkle=meta.get('linker_add_lib_as_env',[])
if len(llinkle)>0:
for q in llinkle:
if slfa!='': slfa+=' '
slfa+=svarb+svarb1+q+svare1+svare
cc=slcmd
cc=cc.replace('$#linker#$', svarb+linker_env+svare)
cc=cc.replace('$#obj_files#$', sofs)
cc=cc.replace('$#flags_before#$', slfb)
cc=cc.replace('$#flags_after#$', slfa)
sb+='echo '+eifs+cc+eifs+'\n'
sb+=no+cc+'\n'
sb+=no+sqie+'\n'
# Add objdump
if target_exe!='':
if meta.get('skip_objdump','')!='yes':
sb+='\n'+no+svarb+'CK_OBJDUMP'+svare+' '+target_exe+' '+stro+' '+target_exe+'.dump'+'\n'
# Add md5sum
if meta.get('skip_md5sum','')!='yes':
x='<'
# if hplat=='win':x=''
sb+='\n'+no+md5sum+' '+x+' '+target_exe+'.dump '+stro+' '+target_exe+'.md5'+'\n'
# Add git hash (if supported)
xnull='/dev/null'
if hplat=='win': xnull='null'
sb+='\n'+no+'git rev-parse HEAD '+stro+' '+target_exe+'.git_hash'+' '+stre+xnull+'\n'
# Stop energy monitor, if needed and if supported
if me=='yes' and sspm2!='':
if o=='con':
ck.out('')
ck.out('Stopping energy monitor')
ck.out('')
sb+='\n'
sb+=scall+' '+sspm2+'\n'
sb+='\n'
# Add exit /0 if needed (on Windows git and md5sum can mess up return code)
if bex!='':
sb+='\n\n'+bex.replace('$#return_code#$','0')
# Record to tmp batch and run
rx=ck.gen_tmp_file({'prefix':'tmp-', 'suffix':sext, 'remove_dir':'yes'})
if rx['return']>0: return rx
fn=rx['file_name']
rx=ck.save_text_file({'text_file':fn, 'string':sb})
if rx['return']>0: return rx
y=''
if sexe!='':
y+=sexe+' '+sbp+fn+envsep
y+=' '+scall+' '+sbp+fn
if o=='con':
ck.out('')
ck.out('Executing prepared batch file '+fn+' ...')
ck.out('')
sys.stdout.flush()
start_time1=time.time()
if ubtr!='': y=ubtr.replace('$#cmd#$',y)
############################################## Compiling code here ##############################################
rx=0
ry=ck.system_with_timeout({'cmd':y, 'timeout':xcto})
rry=ry['return']
if rry>0:
if rry!=8: return ry
else:
rx=ry['return_code']
comp_time=time.time()-start_time1
ccc['compilation_time']=comp_time
if sca!='yes':
if fn!='' and os.path.isfile(fn): os.remove(fn)
git_hash=''
# Try to read git hash file
if os.path.isfile(target_exe+'.git_hash'):
rz=ck.load_text_file({'text_file':target_exe+'.git_hash'})
if rz['return']==0:
git_hash=rz['string'].strip()
ccc['program_git_hash']=git_hash
ofs=0
tbs=0
md5=''
if rry==8:
misc['compilation_success']='no'
misc['compilation_success_bool']=False
misc['fail_reason']=ry['error']
ccc['compilation_success']='no'
ccc['compilation_success_bool']=False
ccc['fail_reason']=ry['error']
elif rx>0:
misc['compilation_success']='no'
misc['compilation_success_bool']=False
misc['fail_reason']='return code '+str(rx)+' !=0 '
ccc['compilation_success']='no'
ccc['compilation_success_bool']=False
ccc['fail_reason']='return code '+str(rx)+' !=0 '
else:
misc['compilation_success']='yes'
misc['compilation_success_bool']=True
ccc['compilation_success']='yes'
ccc['compilation_success_bool']=True
# Check some characteristics
if os.path.isfile(target_exe):
ccc['binary_size']=os.path.getsize(target_exe)
ofs=ccc['binary_size']
tbs=ofs
# Try to read md5 file
if os.path.isfile(target_exe+'.md5'):
rz=ck.load_text_file({'text_file':target_exe+'.md5'})
if rz['return']==0:
md5x=rz['string']
ix=md5x.find(' ')
if ix>0:
md5=md5x[:ix].strip()
ccc['md5_sum']=md5
# Check obj file sizes
if len(xsofs)>0:
ofs=0
ccc['obj_sizes']={}
for q in xsofs:
if os.path.isfile(q):
ofs1=os.path.getsize(q)
ccc['obj_sizes'][q]=ofs1
ofs+=ofs1
ccc['obj_size']=ofs
ccc['compilation_time_with_module']=time.time()-start_time
if o=='con':
s=''
if meta.get('no_compile','')=='yes':
s='Warning: This program doesn\'t require compilation ...'
else:
s='Compilation time: '+('%.3f'%comp_time)+' sec.'
if meta.get('no_target_file','')!='yes':
s+='; Object size: '+str(ofs)+'; Total binary size: '+str(tbs)+'; MD5: '+md5
ck.out(sep)
ck.out(s)
if misc.get('compilation_success','')=='no':
ck.out('Warning: compilation failed!')
##################################################################################################################
################################### Run ######################################
elif sa=='run':
start_time=time.time()
# Remote dir
if remote=='yes':
rdir=tosd.get('remote_dir','')
if rdir!='' and not rdir.endswith(stdirs): rdir+=stdirs
if td!='': rdir+=td
if rdir!='' and not rdir.endswith(stdirs): rdir+=stdirs
src_path_local=p+sdirs
if remote=='yes' and remote_ssh!='yes':
src_path=rdir
else:
src_path=src_path_local
sc=i.get('skip_calibration','')
xcalibrate_time=i.get('calibration_time','')
if xcalibrate_time=='': xcalibrate_time=cfg['calibration_time']
calibrate_time=float(xcalibrate_time)
# Figure out the "active" cmd key
run_cmds=meta.get('run_cmds',{})
if len(run_cmds)==0:
return {'return':1, 'error':'no CMD for run'}
kcmd=i.get('cmd_key','')
krun_cmds=sorted(list(run_cmds.keys()))
if kcmd=='':
if len(krun_cmds)>1:
zz={}
iz=0
for z in sorted(krun_cmds):
add=True
# Check if skip by deps tags
skp=run_cmds[z].get('skip_if_deps_tags',[])
for sk in skp:
if len(sk)>0:
for skx in deps:
sktags=deps[skx].get('dict',{}).get('tags',[])
found=True
for skt in sk:
if skt not in sktags:
found=False
break
if found:
add=False
break
if not add:
break
if add:
# Check if add only by deps tags
aif=run_cmds[z].get('add_only_if_deps_tags',[])
if len(aif)>0:
add=False
for sk in aif:
if len(sk)>0:
for skx in deps:
sktags=deps[skx].get('dict',{}).get('tags',[])
found=True
for skt in sk:
if skt not in sktags:
found=False
break
if found:
add=True
break
if add:
break
if add:
zz[str(iz)]=z
iz+=1
if len(zz)>0:
if len(zz)==1:
x='0'
else:
ck.out('')
ck.out('More than one command line is found to run this program:')
ck.out('')
for iz in range(0, len(zz)):
zs=str(iz)
z=zz[zs]
zcmd=run_cmds[z].get('run_time',{}).get('run_cmd_main','')
if zcmd!='': z+=' ('+zcmd+')'
ck.out(zs+') '+z)
ck.out('')
rx=ck.inp({'text':'Select command line (or Enter to select 0): '})
x=rx['string'].strip()
if x=='': x='0'
if x not in zz:
return {'return':1, 'error':'command line number is not recognized'}
kcmd=zz[x]
else:
return {'return':1, 'error':'no CMD for run for these software dependencies'}
else:
kcmd=krun_cmds[0]
else:
if kcmd not in krun_cmds:
return {'return':1, 'error':'CMD key not found in program description'}
# Command line key is set
vcmd=run_cmds[kcmd]
misc['cmd_key']=kcmd
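# Illustrative shape of 'run_cmds' in a program meta (key names follow what is read
# below; the concrete values are hypothetical):
#
#   "run_cmds": {
#     "default": {
#       "dataset_tags": ["dataset", "image"],
#       "run_time": {
#         "run_cmd_main": "$#BIN_FILE#$ $#dataset_path#$$#dataset_filename#$ tmp-output.tmp",
#         "run_cmd_out1": "tmp-stdout.tmp",
#         "run_cmd_out2": "tmp-stderr.tmp"
#       }
#     }
#   }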
# Update environment with defaults (run_vars are runtime environment defaults)
run_vars = meta.get('run_vars',{}).copy() # first load ground-level-precedence defaults for all commands
run_vars.update( vcmd.get('run_vars',{}) ) # then override with higher-precedence defaults for this specific command
for q in run_vars:
if q not in env:
x=run_vars[q]
try:
x=x.replace('$#src_path#$', src_path)
except Exception as e: # need to detect if not string (not to crash)
pass
env[q]=x
# Update env if repeat
if sc!='yes' and 'CT_REPEAT_MAIN' in run_vars:
if repeat!=-1:
if 'CT_REPEAT_MAIN' not in run_vars:
return {'return':1, 'error':'this program does not support execution time calibration'}
env['CT_REPEAT_MAIN']=str(repeat) # it is fixed by user
sc='yes'
else:
repeat=int(run_vars.get('CT_REPEAT_MAIN','1'))
env['CT_REPEAT_MAIN']='$#repeat#$' # find later
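# The '$#repeat#$' placeholder is resolved later during calibration: the program is
# re-run with an adjusted CT_REPEAT_MAIN until the execution time approaches
# calibration_time (4.0 sec. by default), for at most calibration_max (10) attempts.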
# Update env if energy meter
if me=='yes':
env['CK_MONITOR_ENERGY']='1'
env['XOPENME_FILES']=svarb+svarb1+'CK_ENERGY_FILES'+svare1+svare
# Check run-time deps
rx=update_run_time_deps({'host_os':hos,
'target_os':tos,
'target_id':tdid,
'deps':deps,
'deps_cache':deps_cache,
'reuse_deps':reuse_deps,
'meta':meta,
'cmd_key':kcmd,
'cmd_meta':vcmd,
'out':oo,
'install_to_env':iev,
'env_for_resolve':env,
'preset_deps':preset_deps,
'random':ran,
'safe':safe,
'quiet':quiet})
if rx['return']>0: return rx
aenv=rx.get('aggregated_env',{})
if rx.get('resolve',{}).get('bat','')!='':
if remote!='yes':
sb+=no+rx['resolve']['bat']
##################################################
c=''
rt=vcmd.get('run_time',{})
rif=rt.get('run_input_files',[])
rifo={}
# Check if dynamic and remote to copy .so to device (only on the 1st autotuning and statistical iteration!)
# unless explicitly forbidden (such as libOpenCL ...)
if ctype=='dynamic' and remote=='yes':
if srn==0 and ati==0:
for q in deps:
qq=deps[q].get('cus',{})
qdl=qq.get('dynamic_lib','')
if qq.get('skip_copy_to_remote','')!='yes':
if qdl!='':
qpl=qq.get('path_lib','')
qq1=os.path.join(qpl,qdl)
if os.path.isfile(qq1) and not qq1.endswith('.a'):
rif.append(qq1)
rifo[qq1]='yes' # if pushing to external, do not use current path
aef=qq.get('adb_extra_files',[])
for qq1 in aef:
rif.append(qq1)
rifo[qq1]='yes' # if pushing to external, do not use current path
# Check if run_time env is also defined
rte=rt.get('run_set_env2',{})
if len(rte)>0:
env.update(rte)
# Check GPGPU
compute_platform_id=i.get('compute_platform_id','')
compute_device_id=i.get('compute_device_id','')
# Check if need to select GPGPU
ngd=rt.get('need_compute_device','')
if ngd!='':
if o=='con':
ck.out(sep)
ck.out('Detecting GPGPU targets ...')
ck.out('')
ii={'action':'detect',
'module_uoa':cfg['module_deps']['platform.gpgpu'],
'host_os':hos,
'target_os':tos,
'device_id':tdid,
'compute_platform_id':compute_platform_id,
'compute_device_id':compute_device_id,
'type':ngd,
# 'deps':xdeps,
'select':'yes',
'sudo':isd,
'out':oo,
'quiet':quiet}
target=i.get('target','')
if target!='': ii['target']=target
r=ck.access(ii)
if r['return']>0: return r
compute_platform_id=r.get('choices',{}).get('compute_platform_id','')
compute_device_id=r.get('choices',{}).get('compute_device_id','')
if 'add_to_features' not in misc: misc['add_to_features']={}
misc['add_to_features']['gpgpu']=r.get('features',{}).get('gpgpu',{})
if 'add_to_choices' not in misc: misc['add_to_choices']={}
misc['add_to_choices']['compute_platform_id']=compute_platform_id
misc['add_to_choices']['compute_device_id']=compute_device_id
# Finish GPGPU selection, if needed
if compute_platform_id!='':
env['CK_COMPUTE_PLATFORM_ID']=compute_platform_id
if compute_device_id!='':
env['CK_COMPUTE_DEVICE_ID']=compute_device_id
# Check APK
apk=meta.get('apk',{})
if len(apk)>0:
if o=='con':
ck.out(sep)
ck.out('Detecting/installing required APK ...')
ck.out('')
ix={'action':'install',
'module_uoa':cfg['module_deps']['apk'],
'host_os':hos,
'target_os':tos,
'device_id':tdid,
'out':oo}
ix.update(apk)
r=ck.access(ix)
if r['return']>0:
if r['return']==16:
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']=r['error']
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
return r
if 'add_to_features' not in misc: misc['add_to_features']={}
misc['add_to_features']['apk']=r.get('params',{})
# Update env from deps
# for kkd in sorted(deps, key=lambda kk: deps[kk].get('sort',0)):
# for kkd1 in deps[kkd].get('dict',{}).get('env',{}):
# if kkd1 not in env:
# env[kkd1]=deps[kkd]['dict']['env'][kkd1]
# Check specialized env by OS from deps
for k in deps:
xenv=deps[k].get('cus',{}).get('env_by_os',{}).get(tplat2,{})
if len(xenv)>0:
for k1 in xenv:
if env.get(k1,'')=='':
env[k1]=xenv[k1]
# Add compiler dep again, if there (otherwise some libs can set another compiler)
x=deps.get('compiler',{}).get('bat','')
if remote!='yes' and x!='' and not sb.endswith(x):
sb+='\n'+no+x.strip()+' 1\n' # We set 1 to tell environment that it should set again even if it was set before
# for kkd1 in deps['compiler'].get('dict',{}).get('env',{}):
# if kkd1 not in env:
# env[kkd1]=deps['compiler']['dict']['env'][kkd1]
# Check if need to remove some env before run (useful for remote devices)
for k in meta.get('remove_env_before_run',[]):
if k in env:
del(env[k])
# Command line preparation
c=rt.get('run_cmd_main','')
if remote=='yes' and rt.get('run_cmd_main_remote','')!='':
c=rt['run_cmd_main_remote']
if drcmd!='':
c=drcmd
if c=='':
return {'return':1, 'error':'cmd is not defined'}
if remote=='yes':
c=c.replace('$<<','${').replace('>>$','}')
else:
c=c.replace('$<<',svarb+svarb1).replace('>>$',svare1+svare)
c=c.replace('$#script_ext#$',sext)
c=c.replace('$#dir_sep#$',stdirs)
up_dir=''
if remote!='yes': up_dir='../'
c=c.replace('$#up_dir#$',up_dir)
# Add extra before CMD if there ...
c=prcmd+' '+c
# Replace bin file
te=target_exe
if meta.get('skip_add_prefix_for_target_file','')!='yes':
te=stbp+te
# Check if affinity
if aff!='':
te=aff+' '+te
c=c.replace('$#BIN_FILE#$', te)
c=c.replace('$#os_dir_separator#$', stdirs)
x=''
if remote_ssh!='yes':
x='..'+stdirs
c=c.replace('$#previous_dir#$', x)
c=c.replace('$#src_path#$', src_path)
c=c.replace('$#env1#$',svarb)
c=c.replace('$#env2#$',svare)
# Update keys in run cmd (useful for CMD autotuning)
for k in rcsub:
xv=rcsub[k]
c=c.replace('$#'+k+'#$',str(xv))
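# For example (hypothetical key), run_cmd_substitutes={'unroll':'4'} turns a CMD
# fragment '... $#unroll#$ ...' into '... 4 ...', which is how CMD-level autotuning
# plugs concrete values into the run command.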
# Check if takes datasets from CK
dtags=vcmd.get('dataset_tags',[])
# Check if need to add dataset file as JSON to run_vars
adfe=vcmd.get('add_dataset_file_to_env','')
edtags=i.get('extra_dataset_tags', [])
if len(dtags)>0 and len(edtags)>0:
dtags += edtags
dmuoa=cfg['module_deps']['dataset']
dduoa=i.get('dataset_uoa','')
dfile=''
dfile_keys=[]
if dduoa!='' or len(dtags)>0:
if dduoa=='':
misc['dataset_tags']=dtags
dtags_csv = ','.join(dtags)
rx=ck.access({'action':'search',
'module_uoa':dmuoa,
'tags':dtags_csv,
'add_info':'yes'})
if rx['return']>0: return rx
lst=rx['lst']
if len(lst)==0:
return {'return':1, 'error':'no related datasets found (tags='+dtags_csv+')'}
elif len(lst)==1:
dduoa=lst[0].get('data_uid','')
else:
ck.out('')
ck.out('More than one dataset entry is found for this program:')
ck.out('')
# zz={}
# iz=0
# for z1 in sorted(lst, key=lambda v: v['data_uoa']):
# z=z1['data_uid']
# zu=z1['data_uoa']
#
# zs=str(iz)
# zz[zs]=z
#
# ck.out(zs+') '+zu+' ('+z+')')
#
# iz+=1
#
# ck.out('')
# rx=ck.inp({'text':'Select dataset UOA (or Enter to select 0): '})
# x=rx['string'].strip()
# if x=='': x='0'
#
# if x not in zz:
# return {'return':1, 'error':'dataset number is not recognized'}
#
# dduoa=zz[x]
r=ck.access({'action':'select_uoa',
'module_uoa':cfg['module_deps']['choice'],
'choices':lst})
if r['return']>0: return r
dduoa=r['choice']
ck.out('')
if dduoa=='':
return {'return':1, 'error':'dataset is not specified'}
misc['dataset_uoa']=dduoa
# If remote, init
if remote=='yes':
rs=tosd['remote_shell'].replace('$#device#$',xtdid)
rse=tosd.get('remote_shell_end','')+' '
if sdi!='yes':
ck.out(sep)
r=ck.access({'action':'init_device',
'module_uoa':cfg['module_deps']['platform'],
'os_dict':tosd,
'device_id':tdid,
'out':o})
if r['return']>0: return r
# Try to create directories
x=rs+' '+tosd['make_dir']+rdir+' '+rse
if o=='con':
ck.out('')
ck.out('Executing: '+x)
r=os.system(x)
# If remote and target exe
if remote=='yes' and target_exe!='' and srn==0:
if srn==0:
# Copy exe to remote
ry=copy_file_to_remote({'target_os_dict':tosd,
'device_id':tdid,
'file1':target_exe,
'file2':rdir+target_exe,
'out':oo})
if ry['return']>0: return ry
# Set chmod
se=tosd.get('set_executable','')
if se!='':
y=rs+' '+se+' '+rdir+target_exe+' '+rse
if o=='con':
ck.out(sep)
ck.out(y)
ck.out('')
ry=os.system(y)
if ry>0:
return {'return':1, 'error':'making binary executable failed on remote device'}
# Loading dataset
dset={}
dp=''
dfiles=[]
ddalias=''
dduid=''
if dduoa!='':
rx=ck.access({'action':'load',
'module_uoa':dmuoa,
'data_uoa':dduoa})
if rx['return']>0: return rx
ddalias=rx['data_alias']
dduid=rx['data_uid']
dd=rx['dict']
dp=rx['path']
xdp=dp+sdirs
if remote=='yes':
c=c.replace('$#dataset_path#$','')
else:
c=c.replace('$#dataset_path#$',xdp)
sb+='\n'+no+etset+' CK_DATASET_PATH='+xdp+'\n'
dset['path']=xdp
dfiles=dd.get('dataset_files',[])
if len(dfiles)>0:
dfile=i.get('dataset_file','')
if dfile!='':
dfiles=[dfile]
misc['dataset_file']=dfile
elif len(dfiles)>0:
if len(dfiles)==1:
dfile=dfiles[0]
else:
# Check if has description:
desc_dfiles=[]
desc_dfiles1=dd.get('desc_dataset_files',{})
for q in dfiles:
x=desc_dfiles1.get(q,{}).get('name','')
if x=='': x=q
desc_dfiles.append(x)
ck.out('************ Selecting dataset file ...')
ck.out('')
r=ck.access({'action':'select_list',
'module_uoa':cfg['module_deps']['choice'],
'choices':dfiles,
'desc':desc_dfiles})
if r['return']>0: return r
dfile=r['choice']
if dfile!='':
env['CK_DATASET_FILENAME']=dfile
# sb+='\n'+no+eset+' CK_DATASET_FILENAME='+dfile+'\n'
dset['file']=dfile
# Check if need to add to env
if adfe=='yes':
jdfile=os.path.join(xdp,os.path.splitext(dfile)[0]+'.json')
# Attempt to load json data sets file
rk=ck.load_json_file({'json_file':jdfile})
if rk['return']>0: return rk
xxd=rk['dict']
# Smart update - if already there, do not update
dfile_keys=list(xxd.keys())
for k in xxd:
if env.get(k,'')=='':
env[k]=xxd[k]
xdfiles=[] # put selected file first
if dfile=='':
xdfiles=dfiles
else:
xdfiles.append(dfile)
for df in dfiles:
if df!=dfile:
xdfiles.append(df)
for k in range(0, len(xdfiles)):
df=xdfiles[k]
# if dfile!='' and k==0:
# df=dfile
kk='$#dataset_filename'
if k>0: kk+='_'+str(k)
kk+='#$'
c=c.replace(kk, df)
if remote=='yes' and srn==0 and sdi!='yes' and sdc!='yes':
# Check if only selected to send
if vcmd.get('send_only_selected_file','')=='yes' and dfile!=df:
continue
# check if also extra files
dfx=[df]
dfx1=dd.get('extra_dataset_files',{}).get(df,[])
for dfy in dfx1:
if dfy not in dfx:
dfx.append(dfy)
for dfz in dfx:
df0, df1 = os.path.split(dfz)
# Push data files to device
y=tosd.get('remote_push_pre','').replace('$#device#$',xtdid)
if y!='':
y=y.replace('$#file1#$', os.path.join(dp,dfz))
y=y.replace('$#file1s#$', df1)
y=y.replace('$#file2#$', rdir+stdirs+dfz)
if o=='con':
ck.out(sep)
ck.out(y)
ck.out('')
ry=os.system(y)
if ry>0:
return {'return':1, 'error':'copying to remote device failed'}
# Push data files to device, if first time
y=tosd['remote_push'].replace('$#device#$',xtdid)
y=y.replace('$#file1#$', os.path.join(dp,dfz))
y=y.replace('$#file1s#$', df1)
y=y.replace('$#file2#$', rdir+stdirs+dfz)
if o=='con':
ck.out(sep)
ck.out(y)
ck.out('')
ry=os.system(y)
if ry>0:
return {'return':1, 'error':'copying to remote device failed'}
rcm=dd.get('cm_properties',{}).get('run_time',{}).get('run_cmd_main',{})
for k in rcm:
kv=rcm[k]
c=c.replace('$#'+k+'#$',kv)
misc['dataset_uoa']=dduoa
# Add env to batch
sb+='\n'
sbenv=''
for k in sorted(env):
v=str(env[k])
v=v.replace('$<<',tsvarb).replace('>>$',tsvare)
if eifsx!='' and wb!='yes':
if v.find(' ')>=0 and not v.startswith(eifsx):
v=eifsx+v+eifsx
sbenv+=no+etset+' '+k+'='+str(v)+'\n'
sb+=sbenv+'\n'
if tosd.get('extra_env','')!='':
sb+=no+tosd['extra_env']+'\n'
# Check if need to add env with current path
if remote=='yes' and len(tosd.get('remote_env_set',[]))>0:
for q in tosd['remote_env_set']:
sb+=q+'\n'
sb+='\n'
# Check if pre-processing script via CK
pvck=rt.get('pre_process_via_ck',{})
if len(pvck)>0:
pvckp=src_path_local
pvckm=pvck.get('module_uoa','')
if pvckm=='': pvckm=work['self_module_uid']
pvckd=pvck.get('data_uoa','')
if pvckd!='':
rp=ck.access({'action':'find',
'module_uoa':pvckm,
'data_uoa':pvckd})
if rp['return']>0: return rp
pvckp=rp['path']
pvckc=pvck.get('script_name','')
if pvckc=='': pvckc='preprocess'
if o=='con':
ck.out('')
ck.out(' (pre-processing via CK ('+pvckp+', '+pvckc+'))')
ck.out('')
# Check if has custom script
try:
cdd=os.getcwd()
except OSError:
os.chdir('..')
cdd=os.getcwd()
cs=None
rxx=ck.load_module_from_path({'path':pvckp, 'module_code_name':pvckc, 'skip_init':'yes'})
cs=rxx.get('code', None)
if cs is None:
rxx['return']=1
rxx['error']='problem loading python code: '+rxx['error']
if rxx['return']==0:
os.chdir(cdd) # restore current dir from above operation
# Call customized script
ii={"host_os_uoa":hosx,
"host_os_uid":hos,
"host_os_dict":hosd,
"target_os_uoa":tosx,
"target_os_uid":tos,
"target_os_dict":tosd,
"target_device_id":tdid,
"ck_kernel":ck,
"misc":misc,
"meta":meta,
"deps":deps,
"env":env, # env has to be updated via returned bat file, but it can be updated for the reproducibility
"run_time":rt,
"dataset_uoa":dduoa,
"dataset_file":dfile,
"dataset_path":dp,
"dataset_meta":dset,
"params":params,
"device_cfg":device_cfg,
"out":oo
}
rxx=cs.ck_preprocess(ii)
if rxx['return']==0:
nenv=rxx.get('new_env',{})
for zk in nenv:
zv=str(nenv[zk])
env[zk]=zv
if zv.find(' ')>=0 and not zv.startswith(eifsx):
zv=eifsx+zv+eifsx
sb+=no+etset+' '+zk+'='+str(zv)+'\n'
psb=rxx.get('bat','')
if psb!='':
sb+='\n'+psb+'\n'
# Add any additional commands to be added to the run script
peppc=rxx.get('extra_post_process_cmd','')
if peppc!='':
eppc+='\n'+peppc+'\n'
# Add any additional input files required by preprocessing
prif=rxx.get('run_input_files',[])
if len(prif)>0:
for df in prif:
if df not in rif:
rif.append(df)
# Add any additional output files generated by preprocessing
prof=rxx.get('run_output_files',[])
if len(prof)>0:
for df in prof:
if df not in rof:
rof.append(df)
if rxx['return']>0:
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']='pre-processing script via CK failed ('+rxx['error']+')'
if o=='con':
ck.out(' (pre processing script via CK failed: '+rxx['error']+')')
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
# If remote and target exe
if remote=='yes' and (target_exe!='' or meta.get('force_copy_input_files_to_remote','')=='yes'):
if sdi!='yes' and srn==0 or ati==0:
# Copy explicit input files, if first time
for df in rif:
# Update if has env
j1=df.find('$<<')
j2=df.find('>>$')
if j2>0:
dfk=df[j1+3:j2]
dfx=env.get(dfk,'')
if dfx=='': dfx=aenv.get(dfk,'')
if dfx!='':
df=df[:j1]+dfx+df[j2+3:]
rifo[df]='yes'
else:
return {'return':1, 'error':'environment variable "'+dfk+'" was not found in environment from dependencies'}
df0, df1 = os.path.split(df)
if df in rifo:
dfx=df
dfy=rdir+stdirs+df1
else:
dfx=os.path.join(p,df)
dfy=rdir+stdirs+df
ry=copy_file_to_remote({'target_os_dict':tosd,
'device_id':tdid,
'file1':dfx,
'file1s':df1,
'file2':dfy,
'out':oo})
if ry['return']>0: return ry
# Check if has unparsed
if sunparsed!='':
c+=' '+sunparsed
# Check if redirect output
rco1=rt.get('run_cmd_out1','')
rco2=rt.get('run_cmd_out2','')
if ee!='':
sb+='\n'+no+ee+'\n\n'
sb+='\necho executing code ...\n'
if (remote!='yes' or meta.get('run_via_third_party','')=='yes') and cons!='yes':
if ercmd!='': c+=' '+ercmd
if rco1!='': c+=' '+stro+' '+rco1
if rco2!='': c+=' '+stre+' '+rco2
sb+=no+c+'\n'
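# Note: at this point 'c' is the fully substituted program command line; when not running interactively
# it is extended with stdout/stderr redirection, e.g. something like './a.out args > stdout.log 2> stderr.log'
# (illustrative only - the redirection operators and file names come from 'run_cmd_out1/2' and the OS meta).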
# Stop energy monitor, if needed and if supported
if me=='yes' and sspm2!='':
if o=='con':
ck.out('')
ck.out('Adding energy monitor stop command')
ck.out('')
sb+='\n'
sb+=scall+' '+sspm2+'\n'
sb+='\n'
fn=''
# Check pre-processing scripts
lppc0=rt.get('pre_process_cmds',[])
ppc0=rt.get('pre_process_cmd','')
if ppc0!='': lppc0.append(ppc0)
# Check if traditional pre-processing script
srx=0 # script exit code
if len(lppc0)>0:
sbu=sbenv+'\n\n'
if ee!='':
sbu+='\n'+no+ee+'\n\n'
for ppc in lppc0:
while ppc.find('$<<')>=0:
j1=ppc.find('$<<')
j2=ppc.find('>>$')
if j2>0:
j3=ppc[j1+3:j2]
ppc=ppc[:j1]+env.get(j3,'')+ppc[j2+3:]
ppc=ppc.replace('$<<',svarb).replace('>>$',svare)
ppc=ppc.replace('$#dir_sep#$',stdirs)
ppc=ppc.replace('$#src_path_local#$', src_path_local).replace('$#src_path#$', src_path)
# Pre-processing is performed on the local machine, so dataset path should be local, not remote!
ppc=ppc.replace('$#dataset_path#$',dp+sdirs)
r9=substitute_some_ck_keys({'string':ppc})
if r9['return']>0: return r9
ppc=r9['string']
# Substitute dataset file if needed
for k in range(0, len(dfiles)):
df=dfiles[k]
if dfile!='' and k==0: df=dfile
kk='$#dataset_filename'
if k>0: kk+='_'+str(k)
kk+='#$'
ppc=ppc.replace(kk, df)
sbu+=ppc+'\n'
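# Note: each pre-process command above is expanded in several passes: $<<VAR>>$ from the current env,
# $#dir_sep#$, $#src_path(_local)#$, $#dataset_path#$ (always the local path) and $#dataset_filename[_N]#$,
# plus CK-specific keys via substitute_some_ck_keys().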
if o=='con':
ck.out('')
ck.out(' (pre processing:')
ck.out('')
ck.out(sbu)
ck.out(' )')
# Record to tmp batch and run
rx=ck.gen_tmp_file({'prefix':'tmp-', 'suffix':sext, 'remove_dir':'yes'})
if rx['return']>0: return rx
fn=rx['file_name']
rx=ck.save_text_file({'text_file':fn, 'string':sbu})
if rx['return']>0: return rx
y=''
if sexe!='':
y+=sexe+' '+sbp+fn+envsep
yy=scall+' '+sbp+fn
y+=' '+yy
srx=os.system(y)
if sca!='yes' and os.path.isfile(fn):
os.remove(fn)
# If error code > 0, set as the error code of the main program and quit
if srx>0:
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']='pre-processing script failed'
if o=='con':
ck.out(' (pre processing script failed!)')
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
# Check post-processing scripts
lppc=rt.get('post_process_cmds',[])
lppcvc=rt.get('post_process_via_ck','')
ppc=rt.get('post_process_cmd','')
if ppc!='': lppc.append(ppc)
ck_check_output=None # detect customized output comparison plugin
fgtf=rt.get('fine_grain_timer_file','')
if env.get('XOPENME_TIME_FILE','')!='':
fgtf=env['XOPENME_TIME_FILE']
# Check if extra post_process
if eppc!='':
sb+=eppc+'\n'
sb=sb.replace('$#BIN_FILE#$', te)
te1=te
if te.startswith('./'):
te1=te[2:]
sb=sb.replace('$#ONLY_BIN_FILE#$', te1)
# Calibrate execution time (to make it longer and minimize system variation,
# if supported)
csb=sb
orepeat=repeat
calibrate_success=False
xcn_max=i.get('calibration_max','')
if xcn_max=='': xcn_max=cfg['calibration_max']
cn_max=int(xcn_max)
for g in rt.get('run_output_files',[]):
rof.append(g)
cn=1
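# Main execution loop: each iteration cleans the declared output files, regenerates the temporary run
# script, executes it (locally or on the remote target), pulls and post-processes the results, and,
# when calibration is enabled, revisits the CT_REPEAT_MAIN kernel repeat count before the next
# iteration (see the 'Check calibration' step at the end of the loop).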
while True:
# Clean output files
rofx=[]
if rco1!='': rofx.append(rco1)
if rco2!='': rofx.append(rco2)
for df in rof:
rofx.append(df)
if o=='con' and len(rofx)>0:
ck.out(' Cleaning output files and directories:')
if skip_exec!='yes':
for df in rofx:
if o=='con': ck.out(' '+df)
if remote=='yes' and meta.get('run_via_third_party','')!='yes':
# Clean data files on device
y=rs+' '+tosd['delete_file']+ ' '+rdir+stdirs+df+' '+rse
if o=='con':
ck.out('')
ck.out(y)
ck.out('')
ry=os.system(y)
if tosd.get('delete_file_extra','')!='':
y=tosd['delete_file_extra']+df+' '+rse
if o=='con':
ck.out('')
ck.out(y)
ck.out('')
ry=os.system(y)
if os.path.isfile(df):
os.remove(df)
# Delete global directories locally (needed for ARM WA)
for df in meta.get('clean_dirs',[]):
if df!='':
if o=='con':
ck.out('')
ck.out(' Removing directory '+df+' ...')
ck.out('')
shutil.rmtree(df,ignore_errors=True)
if o=='con': ck.out('')
if sc!='yes' and 'CT_REPEAT_MAIN' in run_vars:
if o=='con':
ck.out(sep)
ck.out('### Calibration '+str(cn)+' out of '+xcn_max+' ; Kernel repeat number = '+str(repeat))
sb=csb
if sc!='yes' and 'CT_REPEAT_MAIN' in run_vars and repeat!=-1:
sb=sb.replace('$#repeat#$', str(repeat))
env['CT_REPEAT_MAIN']=str(repeat)
# Check sudo init
if isd=='yes':
if o=='con':
ck.out(sep)
ck.out(' (preparing sudo - may ask for a password ...)')
if remote!='yes':
os.system(sudo_init)
if o=='con': ck.out(sep)
# Prepare tmp batch file with run instructions
rx=ck.gen_tmp_file({'prefix':'tmp-', 'suffix':sext, 'remove_dir':'yes'})
if rx['return']>0: return rx
fn=rx['file_name']
xbbp=bbp
if remote=='yes':
xbbp=bbpt
if xbbp!='':
sb=xbbp+'\n\n'+sb
rx=ck.save_text_file({'text_file':fn, 'string':sb})
if rx['return']>0: return rx
# Prepare execution
if remote=='yes' and meta.get('run_via_third_party','')!='yes':
# Copy above batch file to remote device
y=tosd.get('remote_push','').replace('$#device#$',xtdid)
y=y.replace('$#file1#$', fn)
y=y.replace('$#file2#$', rdir+fn)
if o=='con':
ck.out(sep)
ck.out(y)
ck.out('')
ry=os.system(y)
if ry>0:
return {'return':1, 'error':'copying to remote device failed'}
# Prepare command line for remote device
y=''
if isd=='yes':
y+=sudo_init+' '+envtsep
y+=sudo_pre+' '+envtsep
y+=tosd.get('interpreter','')+' '+stbp+fn
# x=sb.split('\n')
# for q in x:
# if q!='':
# if y!='': y+=envtsep
# y+=' '+q
if isd=='yes': y=y+' '+envtsep+' '+sudo_post
eifsx1=eifsx
if rs.endswith('"'):
eifsx1=''
elif eifsx!='':
y=y.replace('"','\\"')
yrdir=rdir
if tosd.get('remote_dir_full','')!='':
yrdir=tosd['remote_dir_full']+stdirs+rdir
y=rs+' '+eifsx1+tosd['change_dir']+' '+yrdir+envtsep+' '+y+eifsx1+' '+rse
if cons!='yes':
if ercmd!='': y+=' '+ercmd
if rco1!='': y+=' '+stro+' '+rco1
if rco2!='': y+=' '+stre+' '+rco2
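# Note: on a remote target the final command typically wraps the pushed script in the remote shell,
# e.g. something resembling: adb -s <device> shell "cd <remote dir> ; sh ./tmp-XXXX.sh"
# (illustrative only - 'rs', 'change_dir', the interpreter and the quoting all come from the target OS meta).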
# if o=='con':
# ck.out(y)
else:
y=''
if sexe!='':
y+=sexe+' '+sbp+fn+envsep
if isd=='yes':
yy=sudo_pre+' '+sbp+fn+' '+envtsep+' '+sudo_post
else:
yy=scall+' '+sbp+fn
y+=' '+yy
if remote!='yes' and ubtr!='': y=ubtr.replace('$#cmd#$',y)
if o=='con':
ck.out(sep)
ck.out('Prepared script:')
ck.out('')
ck.out(sb)
ck.out(sep)
ck.out(' ('+y.strip()+')')
if o=='con':
ck.out('')
ck.out(' (sleep 0.5 sec ...)')
time.sleep(0.5)
ck.out('')
ck.out(' (run ...)')
############################################## Running code here ##############################################
sys.stdout.flush()
start_time1=time.time()
rx=0
rry=0
if skip_exec!='yes':
ry=ck.system_with_timeout({'cmd':y, 'timeout':xrto})
rry=ry['return']
if rry>0:
if rry!=8: return ry
else:
rx=ry['return_code']
elif o=='con':
ck.out('')
ck.out(' * skipped execution ... *')
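# Assumption (based on the check above): ck.system_with_timeout() returns kernel code 8 when the command
# times out; that case is tolerated here (execution proceeds to post-processing), any other non-zero
# kernel code aborts the call, and the program's own exit code is recorded in 'rx' only on a normal run.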
exec_time=time.time()-start_time1
# Hack to fix an occasional strange effect when time.time() returns 0 and the elapsed time becomes negative
if exec_time<0: exec_time=-exec_time
if sca!='yes':
if fn!='' and os.path.isfile(fn): os.remove(fn)
# Pull files from the device if remote
if remote=='yes':
xrof=rof
if i.get('pull_only_timer_files','')=='yes':
xrof=[fgtf]
for df in xrof:
# Pull output files from device
df0, df1 = os.path.split(df)
# Push data files to device
y=tosd['remote_pull'].replace('$#device#$',xtdid)
y=y.replace('$#file1#$', rdir+stdirs+df)
y=y.replace('$#file1s#$', df1)
y=y.replace('$#file2#$', df)
if o=='con':
ck.out('')
ck.out(y)
ck.out('')
ry=os.system(y)
y=tosd.get('remote_pull_post','').replace('$#device#$',xtdid)
if y!='':
y=y.replace('$#file1#$', rdir+stdirs+df)
y=y.replace('$#file1s#$', df1)
y=y.replace('$#file2#$', df)
if o=='con':
ck.out(sep)
ck.out(y)
ck.out('')
ry=os.system(y)
if ry>0:
return {'return':1, 'error':'pulling from remote device failed'}
# Check if print files
pfar=meta.get('print_files_after_run',[])
if len(pfar)>0 and sfp!='yes' and o=='con':
ck.out('')
ck.out(' (printing output files) ')
for q in pfar:
ck.out('')
ck.out(' * '+q)
ck.out('')
rz=ck.load_text_file({'text_file':q, 'split_to_list':'yes', 'encoding':sys.stdout.encoding})
if rz['return']==0:
lxx=rz['lst']
for q1 in lxx:
ck.out(' '+q1)
# Check if post-processing script from CMD
if pp_uoa!='':
if o=='con':
ck.out('')
ck.out(' (post processing from script '+pp_uoa+' / '+pp_name+' ...)')
ck.out('')
iz={'action':'run',
'module_uoa':cfg['module_deps']['script'],
'data_uoa':pp_uoa,
'name':pp_name,
'params':pp_params}
rz=ck.access(iz)
if rz['return']>0: return rz
# For now ignore output
# Check if post-processing script
srx=0 # script exit code
# Newer variant (more consistent with pre_process_via_ck)
if type(lppcvc)==dict:
pvck=lppcvc
pvckp=src_path_local
pvckm=pvck.get('module_uoa','')
if pvckm=='': pvckm=work['self_module_uid']
pvckd=pvck.get('data_uoa','')
if pvckd!='':
rp=ck.access({'action':'find',
'module_uoa':pvckm,
'data_uoa':pvckd})
if rp['return']>0: return rp
pvckp=rp['path']
pvckc=pvck.get('script_name','')
if pvckc=='': pvckc='postprocess'
if o=='con':
ck.out('')
ck.out(' (post processing via CK: '+pvckp+', '+pvckc+')')
ck.out('')
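# Note on the post-processing plugin contract (inferred from how the result is consumed below):
# the loaded script may expose
#   ck_check_output(i) - custom comparison of the program output with a reference
#   ck_postprocess(i)  - may return 'characteristics' (including 'execution_time', which overrides the
#                        measured wall-clock time) and 'misc' entries merged into the final result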
# Check if has custom script
try:
cdd=os.getcwd()
except OSError:
os.chdir('..')
cdd=os.getcwd()
cs=None
rxx=ck.load_module_from_path({'path':pvckp, 'module_code_name':pvckc, 'skip_init':'yes'})
cs=rxx.get('code', None)
if cs==None:
rxx['return']=1
rxx['error']='problem loading python code: '+rxx['error']
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']=rxx['error']
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
if rxx['return']==0:
os.chdir(cdd) # restore current dir from above operation
if cs!=None and 'ck_check_output' in dir(cs):
ck_check_output=cs.ck_check_output
if cs!=None and 'ck_postprocess' in dir(cs):
as_cmd=False
# Call customized script
ii={"host_os_uoa":hosx,
"host_os_uid":hos,
"host_os_dict":hosd,
"target_os_uoa":tosx,
"target_os_uid":tos,
"target_os_dict":tosd,
"target_device_id":tdid,
"ck_kernel":ck,
"misc":misc,
"meta":meta,
"deps":deps,
"env":env,
"dataset_uoa":dduoa,
"dataset_file":dfile,
"dataset_path":dp,
"dataset_meta":dset,
"run_time":rt,
"params":params,
"device_cfg":device_cfg,
"out":oo
}
rxx=cs.ck_postprocess(ii)
srx=rxx['return']
if srx==0:
xchars=rxx.get('characteristics',{})
if len(xchars)>0:
et=xchars.get('execution_time','')
if et!='':
exec_time=float(et)
ccc.update(xchars)
if len(rxx.get('misc',{}))>0:
misc.update(rxx['misc'])
else:
if o=='con':
ck.out(' (post processing script failed: '+rxx['error']+'!)')
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']=rxx['error']
# break
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
# Older variant
if len(lppc)>0:
for ppc in lppc:
while ppc.find('$<<')>=0:
j1=ppc.find('$<<')
j2=ppc.find('>>$')
if j2>0:
j3=ppc[j1+3:j2]
ppc=ppc[:j1]+env.get(j3,'')+ppc[j2+3:]
ppc=ppc.replace('$<<',svarb).replace('>>$',svare)
ppc=ppc.replace('$#dir_sep#$',stdirs)
ppc=ppc.replace('$#src_path_local#$', src_path_local).replace('$#src_path#$', src_path)
# Post-processing is performed on the local machine, so dataset path should be local, not remote!
# if remote=='yes':
# ppc=ppc.replace('$#dataset_path#$','')
# elif dp!='':
ppc=ppc.replace('$#dataset_path#$',dp+sdirs)
r9=substitute_some_ck_keys({'string':ppc})
if r9['return']>0: return r9
ppc=r9['string']
if o=='con':
ck.out('')
ck.out(' (post processing: "'+ppc+'")')
ck.out('')
# Check if via CK, otherwise run as system
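# When post_process_via_ck=='yes' and the command has the form 'python <script>.py ...', the script is
# imported in-process via ck.load_module_from_path() and its ck_postprocess()/ck_check_output() hooks are
# used instead of spawning a separate interpreter; any other command is executed via os.system().
# Example (hypothetical): post_process_cmd='python $#src_path#$postprocess.py'.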
if lppcvc=='yes':
ppcs=ppc.split()
if len(ppcs)>1:
if ppcs[0].startswith('python'):
ppcm=ppcs[1]
ppcm1=os.path.basename(ppcm)
ppcm2=os.path.dirname(ppcm)
if ppcm1.endswith('.py'):
ppcm1=ppcm1[:-3]
# Check if has custom script
try:
cdd=os.getcwd()
except OSError:
os.chdir('..')
cdd=os.getcwd()
cs=None
rxx=ck.load_module_from_path({'path':ppcm2, 'module_code_name':ppcm1, 'skip_init':'yes'})
if rxx['return']>0:
if o=='con':
ck.out(' (post processing script failed: '+rxx['error']+'!)')
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']=rxx['error']
# break
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
cs=rxx['code']
os.chdir(cdd) # restore current dir from above operation
if cs!=None and 'ck_check_output' in dir(cs):
ck_check_output=cs.ck_check_output
if cs!=None and 'ck_postprocess' in dir(cs):
as_cmd=False
# Call customized script
ii={"host_os_uoa":hosx,
"host_os_uid":hos,
"host_os_dict":hosd,
"target_os_uoa":tosx,
"target_os_uid":tos,
"target_os_dict":tosd,
"target_device_id":tdid,
"ck_kernel":ck,
"misc":misc,
"meta":meta,
"deps":deps,
"env":env,
"dataset_uoa":dduoa,
"dataset_file":dfile,
"dataset_path":dp,
"dataset_meta":dset,
"run_time":rt,
"params":params,
"device_cfg":device_cfg,
"out":oo
}
rxx=cs.ck_postprocess(ii)
srx=rxx['return']
if srx==0:
xchars=rxx.get('characteristics',{})
if len(xchars)>0:
et=xchars.get('execution_time','')
if et!='':
exec_time=float(et)
ccc.update(xchars)
if len(rxx.get('misc',{}))>0:
misc.update(rxx['misc'])
else:
if o=='con':
ck.out(' (post processing script failed: '+rxx['error']+'!)')
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']=rxx['error']
# break
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
else:
srx=os.system(ppc)
# If error code > 0, set as the error code of the main program and quit
if srx>0:
if o=='con':
ck.out(' (post processing script failed!)')
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']='post processing script failed'
# break
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
# If script failed, exit
if srx>0:
# break
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
# Check if fine-grain time
if fgtf!='':
if o=='con':
ck.out('')
ck.out(' (reading fine grain timers from '+fgtf+' ...)')
ck.out('')
rq=ck.load_json_file({'json_file':fgtf})
if rq['return']>0:
misc['run_success']='no'
misc['run_success_bool']=False
misc['fail_reason']=rq['error']
ccc['return_code']=rx
if o=='con':
ck.out('')
ck.out('Program execution likely failed (can\'t find fine grain timers)!')
ck.out('')
return {'return':0, 'tmp_dir':rcdir, 'misc':misc, 'characteristics':ccc, 'deps':deps}
drq=rq['dict']
ccc.update(drq)
et=drq.get('execution_time','')
exec_time=0.0
if et!='':
exec_time=float(et)
if o=='con' and i.get('skip_print_timers','')!='yes':
import json
ck.out(json.dumps(drq, indent=2, sort_keys=True))
ck.out('')
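# Illustrative fine-grain timer file (assumed format, based on the keys read above): a JSON dict produced
# by the instrumented program, e.g. {"execution_time": 1.234, "execution_time_kernel_0": 1.230}.
# Only 'execution_time' is treated specially; all other keys are merged into the characteristics as-is.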
# If return code >0 and program does not ignore return code, quit
if (rx>0 and vcmd.get('ignore_return_code','').lower()!='yes') or rry>0:
break
# Check calibration
if sc=='yes' or repeat==-1 or 'CT_REPEAT_MAIN' not in run_vars: