
Commit

- arranged some additional output in function analyseScheduledTaskGraph
git-svn-id: https://openmodelica.org/svn/OpenModelica/trunk@18400 f25d12d1-65f4-0310-ae8a-bbce733d8d8e
Volker Waurich committed Dec 4, 2013
1 parent 6ea80e0 commit 1e28a6a
Showing 2 changed files with 52 additions and 11 deletions.
50 changes: 48 additions & 2 deletions Compiler/BackEnd/HpcOmScheduler.mo
@@ -2048,6 +2048,54 @@ algorithm
end printSchedule;


public function analyseScheduledTaskGraph "Functions that analyse the scheduled task graph can be applied in here.
author: Waurich TUD 2013-12"
input Schedule scheduleIn;
input Integer numProcIn;
input HpcOmTaskGraph.TaskGraph taskGraphIn;
input HpcOmTaskGraph.TaskGraphMeta taskGraphMetaIn;
output String criticalPathInfoOut;
algorithm
criticalPathInfoOut := match(scheduleIn,numProcIn,taskGraphIn,taskGraphMetaIn)
local
list<String> lockIdc;
list<list<Integer>> levels;
list<list<Integer>> parallelSets;
list<list<Integer>> criticalPaths, criticalPathsWoC;
array<list<Task>> threadTasks;
Real cpCosts, cpCostsWoC, serTime, parTime, speedUp, speedUpMax;
String criticalPathInfo;
case(LEVELSCHEDULE(eqsOfLevels=levels),_,_,_)
equation
//get the criticalPath
((criticalPaths,cpCosts),(criticalPathsWoC,cpCostsWoC),parallelSets) = HpcOmTaskGraph.longestPathMethod(taskGraphIn,taskGraphMetaIn);
criticalPathInfo = HpcOmTaskGraph.dumpCriticalPathInfo((criticalPaths,cpCosts),(criticalPathsWoC,cpCostsWoC));
Debug.fcall(Flags.HPCOM_DUMP,print,criticalPathInfo);
then
criticalPathInfo;
case(THREADSCHEDULE(threadTasks=threadTasks,lockIdc=lockIdc),_,_,_)
equation
Debug.fcall(Flags.HPCOM_DUMP,print,"the number of locks: "+&intString(listLength(lockIdc))+&"\n");
//get the criticalPath
((criticalPaths,cpCosts),(criticalPathsWoC,cpCostsWoC),parallelSets) = HpcOmTaskGraph.longestPathMethod(taskGraphIn,taskGraphMetaIn);
criticalPathInfo = HpcOmTaskGraph.dumpCriticalPathInfo((criticalPaths,cpCosts),(criticalPathsWoC,cpCostsWoC));
Debug.fcall(Flags.HPCOM_DUMP,print,criticalPathInfo);
//predict speedup etc.
(serTime,parTime,speedUp,speedUpMax) = predictExecutionTime(scheduleIn,SOME(cpCostsWoC),numProcIn,taskGraphIn,taskGraphMetaIn);
Debug.fcall(Flags.HPCOM_DUMP,print,"the serialCosts: "+&realString(serTime)+&"\n");
Debug.fcall(Flags.HPCOM_DUMP,print,"the parallelCosts: "+&realString(parTime)+&"\n");
Debug.fcall(Flags.HPCOM_DUMP,print,"the cpCosts: "+&realString(cpCostsWoC)+&"\n");
printPredictedExeTimeInfo(serTime,parTime,speedUp,speedUpMax,numProcIn);
then
criticalPathInfo;
case(EMPTYSCHEDULE(),_,_,_)
equation
then
"";
end match;
end analyseScheduledTaskGraph;


public function predictExecutionTime "computes the theoretical execution time for the serial and the parallel simulation. A speedup ratio is determined by su = serTime/parTime.
the maximum speedUp is computed via serTime/criticalPathCosts.
author: Waurich TUD 2013-11"
@@ -2117,8 +2165,6 @@ algorithm
isNotOkString = "Something is weird. The predicted SpeedUp is "+&realString(speedUp)+&" and the theoretical maximum speedUp is "+&realString(speedUpMax)+&"\n";
Debug.bcall(realGt(speedUp,speedUpMax),print,isNotOkString);
Debug.bcall(realLe(speedUp,speedUpMax),print,isOkString);
Debug.fcall(Flags.HPCOM_DUMP,print,"the serialCosts: "+&realString(serTime)+&"\n");
Debug.fcall(Flags.HPCOM_DUMP,print,"the parallelCosts: "+&realString(parTime)+&"\n");
then
();
end matchcontinue;
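For readers skimming the diff, the speedup figures referenced in the predictExecutionTime docstring above come down to two ratios. The following sketch is illustrative Python, not part of the commit or of the MetaModelica sources; the names ser_time, par_time and cp_costs_woc are stand-ins for the values handled by predictExecutionTime in HpcOmScheduler.mo.

# Illustrative sketch (Python, not MetaModelica) of the ratios described in the
# predictExecutionTime docstring; ser_time, par_time and cp_costs_woc are assumed inputs.
def predicted_speedups(ser_time, par_time, cp_costs_woc):
    """Return (speedUp, speedUpMax): su = serTime/parTime, bound = serTime/criticalPathCosts."""
    speed_up = ser_time / par_time
    speed_up_max = ser_time / cp_costs_woc
    return speed_up, speed_up_max

# Example: serial cost 12.0, parallel cost 4.0, critical path 3.0
# -> speedUp = 3.0, speedUpMax = 4.0; a speedUp above speedUpMax would trigger
# the "Something is weird" warning printed by printPredictedExeTimeInfo.
print(predicted_speedups(12.0, 4.0, 3.0))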
13 changes: 4 additions & 9 deletions Compiler/BackEnd/HpcOmSimCode.mo
@@ -232,22 +232,17 @@ algorithm
//Apply filters
//-------------
(taskGraph1,taskGraphData1) = applyFiltersToGraph(taskGraphOde,taskGraphDataOde,inBackendDAE,true);
Debug.fcall(Flags.HPCOM_DUMP,HpcOmTaskGraph.printTaskGraph,taskGraph1);
Debug.fcall(Flags.HPCOM_DUMP,HpcOmTaskGraph.printTaskGraphMeta,taskGraphData1);
//Debug.fcall(Flags.HPCOM_DUMP,HpcOmTaskGraph.printTaskGraph,taskGraph1);
//Debug.fcall(Flags.HPCOM_DUMP,HpcOmTaskGraph.printTaskGraphMeta,taskGraphData1);

//Create schedule
//---------------
numProc = Flags.getConfigInt(Flags.NUM_PROC);
(numProc,numFixed) = setNumProc(numProc,cpCostsWoC,taskGraphDataOde);
schedule = createSchedule(taskGraph1,taskGraphData1,sccSimEqMapping,filenamePrefix,numProc);
(schedule,numProc) = repeatScheduleWithOtherNumProc(taskGraph1,taskGraphData1,sccSimEqMapping,filenamePrefix,cpCostsWoC,schedule,numProc,numFixed);

(serTime,parTime,speedUp,speedUpMax) = HpcOmScheduler.predictExecutionTime(schedule,SOME(cpCostsWoC),numProc,taskGraph1,taskGraphData1);
HpcOmScheduler.printPredictedExeTimeInfo(serTime,parTime,speedUp,speedUpMax,numProc);
((criticalPaths,cpCosts),(criticalPathsWoC,cpCostsWoC),parallelSets) = HpcOmTaskGraph.longestPathMethod(taskGraph1,taskGraphData1);
criticalPathInfo = HpcOmTaskGraph.dumpCriticalPathInfo((criticalPaths,cpCosts),(criticalPathsWoC,cpCostsWoC));
Debug.fcall(Flags.HPCOM_DUMP,print,criticalPathInfo);

criticalPathInfo = HpcOmScheduler.analyseScheduledTaskGraph(schedule,numProc,taskGraph1,taskGraphData1);

taskScheduleSimCode = HpcOmScheduler.convertScheduleToSimCodeSchedule(schedule);
schedulerInfo = HpcOmScheduler.convertScheduleStrucToInfo(schedule,arrayLength(taskGraph));

