6 changes: 3 additions & 3 deletions config/kub/kub_plots.json
@@ -8,7 +8,7 @@
"variables": [ "execute", "postprocess", "constructor", "updateForUse", "simulation" ],
"names": [],
"xaxis": {
"parameter": "nb_tasks.tasks_per_node",
"parameter": "resources.tasks",
"label": "Number of tasks"
},
"yaxis": {
@@ -22,7 +22,7 @@
"variables": [ "execute", "postprocess", "constructor", "updateForUse", "simulation" ],
"names": [],
"xaxis": {
"parameter": "nb_tasks.tasks_per_node",
"parameter": "resources.tasks",
"label": "Number of tasks"
},
"yaxis": {
@@ -36,7 +36,7 @@
"variables": [ "execute", "postprocess", "constructor", "updateForUse", "simulation" ],
"names": [],
"xaxis": {
"parameter": "nb_tasks.tasks_per_node",
"parameter": "resources.tasks",
"label": "Number of tasks"
},
"yaxis": {
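The plot x-axis now references the dotted parameter path resources.tasks, matching the renamed parameter block in config/kub/poznan.json below. A minimal sketch of how such a dotted path can be resolved against one instance's parameter record; the record layout and the helper name are illustrative assumptions, not the feelpp.benchmarking API:

def getParameterValue(record, dotted_path):
    """Walk a dotted path such as 'resources.tasks' through a nested parameter record."""
    value = record
    for key in dotted_path.split("."):
        value = value[key]
    return value

instance_parameters = {"resources": {"tasks": 64, "exclusive_access": True}}
assert getParameterValue(instance_parameters, "resources.tasks") == 64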
29 changes: 19 additions & 10 deletions config/kub/poznan.json
@@ -3,6 +3,10 @@
"output_directory": "{{machine.output_app_dir}}/kub",
"use_case_name": "poznan",
"timeout":"0-0:15:0",
"resources":{
"tasks":"{{parameters.resources.tasks.value}}",
"exclusive_access":"{{parameters.resources.exclusive_access.value}}"
},
"platforms": {
"apptainer":{
"image": {
@@ -30,15 +34,23 @@
"stages": [
{
"name":"",
"filepath": "instances/np_{{parameters.nb_tasks.tasks.value}}/logs/timers.json",
"filepath": "instances/np_{{parameters.resources.tasks.value}}/logs/timers.json",
"format": "json",
"variables_path":"*"
"variables_path":["*.timeElapsed.max","execute.subtimers.*.subtimers.*.timeElapsed.max"],
"units":{
"*":"custom instance unit",
"postprocess.exportOutputs":"exportOutputs unit special",
"simulation.buildingExecuteStep":"buildingExecuteStep unit special"
}
},
{
"name":"",
"filepath": "logs/timers.json",
"format": "json",
"variables_path":"*"
"variables_path":["*.timeElapsed.max","*.subtimers.updateForUse.subtimers.*.timeElapsed.max"],
"units":{
"*":"custom unit"
}
}
]
},
@@ -48,14 +60,11 @@
},
"parameters": [
{
"name": "nb_tasks",
"name": "resources",
"sequence": [
{"tasks_per_node" : 8, "tasks":8, "exclusive_access":true},
{"tasks_per_node" : 16, "tasks":16, "exclusive_access":true},
{"tasks_per_node" : 32, "tasks":32, "exclusive_access":true},
{"tasks_per_node" : 64, "tasks":64, "exclusive_access":true},
{"tasks_per_node" : 128, "tasks":128, "exclusive_access":true},
{"tasks_per_node" : 128, "tasks":256, "exclusive_access":true}
{"tasks":32, "exclusive_access":true},
{"tasks":64, "exclusive_access":true},
{"tasks":128, "exclusive_access":true}
]
}
]
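The variables_path entries above switch from a single "*" to dotted glob patterns, so only the timeElapsed.max leaves (and the selected subtimers) are harvested from timers.json. A minimal sketch of expanding such patterns against a nested JSON tree; the helper, the trimmed timers structure, and the traversal are illustrative assumptions, not the package's implementation:

from fnmatch import fnmatch

def expandVariablePaths(tree, pattern, prefix=""):
    """Yield (dotted_path, value) pairs matching a glob pattern like '*.timeElapsed.max'."""
    head, _, rest = pattern.partition(".")
    if not isinstance(tree, dict):
        return
    for key, value in tree.items():
        if not fnmatch(key, head):
            continue
        path = f"{prefix}.{key}" if prefix else key
        if not rest:
            yield path, value
        else:
            yield from expandVariablePaths(value, rest, path)

# Hypothetical, heavily trimmed timers.json content used only for illustration
timers = {
    "execute": {
        "timeElapsed": {"max": 12.3},
        "subtimers": {"assembly": {"subtimers": {"vectors": {"timeElapsed": {"max": 1.1}}}}},
    },
    "simulation": {"timeElapsed": {"max": 15.0}},
}

print(dict(expandVariablePaths(timers, "*.timeElapsed.max")))
print(dict(expandVariablePaths(timers, "execute.subtimers.*.subtimers.*.timeElapsed.max")))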
29 changes: 18 additions & 11 deletions config/toolbox_heat/thermal_bridges_case_3.json
@@ -48,17 +48,12 @@
"--case.discretization {{parameters.discretization.value}}",
"--heat.json.patch='{\"op\": \"replace\",\"path\": \"/Meshes/heat/Import/filename\",\"value\": \"{{platforms.{{machine.platform}}.input_dir}}/{{input_file_dependencies.mesh_json}}\" }'"
],
"outputs": [
{
"filepath": "{{output_directory}}/{{instance}}/{{use_case_name}}/heat.measures/values.csv",
"format": "csv"
}
],
"additional_files":{
"parameterized_descriptions_filepath":"{{output_directory}}/{{instance}}/{{use_case_name}}/heat.information.adoc"
},
"scalability": {
"directory": "{{output_directory}}/{{instance}}/{{use_case_name}}",
"clean_directory":false,
"stages": [
{
"name": "Constructor",
@@ -73,7 +68,19 @@
{
"name": "Solve",
"filepath": "heat.scalibility.HeatSolve.data",
"format": "tsv"
"format": "tsv",
"units":{
"*":"s",
"ksp-niter":"iter"
}
},
{
"name":"Outputs",
"filepath": "heat.measures/values.csv",
"format": "csv",
"units":{
"*":"W"
}
}
]
},
@@ -87,7 +94,7 @@
"zip":[
{
"name":"tasks",
"sequence":[128,256,384,512,640,768,896,1024,1152,1280]
"sequence":[64]
},
{
"name":"exclusive_access",
@@ -112,9 +119,9 @@
"name":"discretization",
"sequence":["P1","P2","P3"],
"conditions":{
"P1":[{ "resources.tasks":[128,256,384,512,640,768] }],
"P2":[{ "resources.tasks":[768,896,1024,1152,1280] }],
"P3":[{ "resources.tasks":[640,768,896,1024,1152,1280] }]
"P1":[{ "resources.tasks":[64] }],
"P2":[{ "resources.tasks":[] }],
"P3":[{ "resources.tasks":[] }]
}
},
{
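Each stage now carries a units map in which "*" acts as a fallback for every variable the stage produces, while explicit keys such as ksp-niter override it. A short sketch of that lookup rule, assuming exact-match-first semantics; the helper and the sample column name are hypothetical:

def resolveUnit(units, variable):
    """Return the unit for a performance variable: exact key first, then the '*' fallback."""
    return units.get(variable, units.get("*", "s"))

solve_units = {"*": "s", "ksp-niter": "iter"}
assert resolveUnit(solve_units, "ksp-niter") == "iter"
assert resolveUnit(solve_units, "solve") == "s"

outputs_units = {"*": "W"}
assert resolveUnit(outputs_units, "heat_flux") == "W"   # hypothetical CSV column name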
12 changes: 8 additions & 4 deletions src/feelpp/benchmarking/reframe/config/configSchemas.py
@@ -13,6 +13,13 @@ class Stage(BaseModel):
filepath:str
format:Literal["csv","tsv","json"]
variables_path:Optional[Union[str,List[str]]] = []
units: Optional[Dict[str,str]] = {}

@field_validator("units",mode="before")
@classmethod
def parseUnits(cls,v):
v["*"] = v.get("*","s")
return v

@model_validator(mode="after")
def checkFormatOptions(self):
@@ -36,10 +43,8 @@ class Scalability(BaseModel):
directory: str
stages: List[Stage]
custom_variables:Optional[List[CustomVariable]] = []
clean_directory: Optional[bool] = False

class AppOutput(BaseModel):
filepath: str
format: str


class Image(BaseModel):
@@ -107,7 +112,6 @@ class ConfigFile(BaseModel):
use_case_name: str
options: List[str]
env_variables:Optional[Dict] = {}
outputs: List[AppOutput]
input_file_dependencies: Optional[Dict[str,str]] = {}
scalability: Scalability
sanity: Sanity
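The Stage schema gains the units field plus a before-validator that injects the "*" wildcard, defaulting it to seconds when the configuration does not set a global unit. A self-contained, trimmed copy of just that behaviour (not the full schema file), runnable with pydantic v2:

from typing import Dict, List, Literal, Optional, Union
from pydantic import BaseModel, field_validator

class Stage(BaseModel):
    # Reduced copy of the real model for illustration; the original also checks
    # format-specific options via a model_validator.
    name: str
    filepath: str
    format: Literal["csv", "tsv", "json"]
    variables_path: Optional[Union[str, List[str]]] = []
    units: Optional[Dict[str, str]] = {}

    @field_validator("units", mode="before")
    @classmethod
    def parseUnits(cls, v):
        v["*"] = v.get("*", "s")   # guarantee a default unit for every variable
        return v

stage = Stage(name="Solve", filepath="heat.scalibility.HeatSolve.data",
              format="tsv", units={"ksp-niter": "iter"})
print(stage.units)   # {'ksp-niter': 'iter', '*': 's'}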
75 changes: 1 addition & 74 deletions src/feelpp/benchmarking/reframe/outputs.py
@@ -1,75 +1,2 @@
import reframe.utility.sanity as sn
import os, re, shutil


class OutputsHandler:
"""Class to handle application outputs and convert them to reframe readable objects"""
def __init__(self,outputs_config,additional_files_config = None):
self.config = outputs_config
self.additional_files_config = additional_files_config

def getOutputs(self):
""" Opens and parses the all the outputs files provided on the configuration
Returns:
dict[str,performance_function] : Dictionary with deferrable functions containing the value of the outputs.
"""
rfm_outputs = {}
for output_info in self.config:
if output_info.format == "csv":
number_regex = re.compile(r'^-?\d+(\.\d+)?([eE][-+]?\d+)?$')
rows = sn.extractall(
r'^(?!\s*$)(.*?)[\s\r\n]*$',
output_info.filepath,
0,
conv=lambda x: [float(col.strip()) if number_regex.match(col.strip()) else col.strip() for col in x.split(',') if col.strip()]
)
header = rows[0]
rows = rows[1:]

assert all ( len(header.evaluate()) == len(row) for row in rows), f"CSV File {output_info.filepath} is incorrectly formatted"

for line in range(len(rows.evaluate())):
for i,col in enumerate(header):
rfm_outputs.update({ f"{col}" : sn.make_performance_function(rows[line][i],unit="") })
else:
raise NotImplementedError(f"Output extraction not implemented for format {output_info.format}")

return rfm_outputs

def copyDescription(self,dir_path, name): #TODO: This can be redesigned... or factor it at least
""" Searches the file on the additional_files.description_filepath configuration and copies it inside dir_path/partials
Args:
dir_path (str) : Directory where the reframe report is exported to
name(str): name of the new file (without extension)
"""
if self.additional_files_config and self.additional_files_config.description_filepath:
file_extension = self.additional_files_config.description_filepath.split(".")[-1] if "." in self.additional_files_config.description_filepath else None

outdir = os.path.join(dir_path,"partials")
if not os.path.exists(outdir):
os.mkdir(outdir)

filename = f"{name}.{file_extension}" if file_extension else name

shutil.copy2( self.additional_files_config.description_filepath, os.path.join(outdir,filename) )



def copyParametrizedDescriptions(self,dir_path,name):
""" Searches the files on the additional_files.parameterized_descriptions_filepath configuration and copy them inside dir_path/partials
Args:
dir_path (str) : Directory where the reframe report is exported to
name(str): name of the new file (without extension)
"""

if self.additional_files_config and self.additional_files_config.parameterized_descriptions_filepath:
file_extension = self.additional_files_config.parameterized_descriptions_filepath.split(".")[-1] if "." in self.additional_files_config.parameterized_descriptions_filepath else None

outdir = os.path.join(dir_path,"partials")
if not os.path.exists(outdir):
os.mkdir(outdir)

filename = f"{name}.{file_extension}" if file_extension else name

shutil.copy2( self.additional_files_config.parameterized_descriptions_filepath, os.path.join(outdir,filename) )
import os, shutil

14 changes: 8 additions & 6 deletions src/feelpp/benchmarking/reframe/regression.py
@@ -2,7 +2,6 @@
from feelpp.benchmarking.reframe.setup import ReframeSetup
from feelpp.benchmarking.reframe.validation import ValidationHandler
from feelpp.benchmarking.reframe.scalability import ScalabilityHandler
from feelpp.benchmarking.reframe.outputs import OutputsHandler


import shutil, os
@@ -16,7 +15,6 @@ class RegressionTest(ReframeSetup):
def initHandlers(self):
self.validation_handler = ValidationHandler(self.app_setup.reader.config.sanity)
self.scalability_handler = ScalabilityHandler(self.app_setup.reader.config.scalability)
self.outputs_handler = OutputsHandler(self.app_setup.reader.config.outputs,self.app_setup.reader.config.additional_files)

@run_after('run')
def executionGuard(self):
@@ -60,13 +58,17 @@ def setPerfVars(self):
self.perf_variables.update(
self.scalability_handler.getCustomPerformanceVariables(self.perf_variables)
)
self.perf_variables.update(
self.outputs_handler.getOutputs()
)

@run_before('performance')
def copyParametrizedFiles(self):
self.outputs_handler.copyParametrizedDescriptions(self.report_dir_path,self.hashcode)
self.app_setup.reset(self.machine_setup.reader.config)
self.app_setup.updateConfig({ "instance" : str(self.hashcode) })
self.app_setup.copyParametrizedDescriptionFile(self.report_dir_path,name=self.hashcode)

@run_before("cleanup")
def removeDirectories(self):
if self.app_setup.reader.config.scalability.clean_directory:
self.app_setup.cleanupDirectories()

@sanity_function
def sanityCheck(self):
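removeDirectories runs before ReFrame's cleanup stage and only acts when the configuration sets scalability.clean_directory to true (the heat-toolbox case above keeps it false so its outputs survive). cleanupDirectories itself is not shown in this diff; a sketch of what it could amount to, under the assumption that it simply drops the per-instance scalability tree:

import os, shutil

def cleanupDirectories(scalability_directory):
    """Hypothetical stand-in for the app setup's cleanupDirectories(): remove the
    per-instance output tree once its performance variables have been harvested."""
    if os.path.isdir(scalability_directory):
        shutil.rmtree(scalability_directory)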