Merge pull request #59 from MatterMiners/enhancement/units-58
Standardisation of units
eileen-kuehn committed Nov 7, 2019
2 parents 9e7173b + bc556a7 commit a698820
Showing 4 changed files with 41 additions and 31 deletions.
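
Summary of the change, as read from the diffs below: every resource amount is converted to base units on read-in and truncated to int. The new factors are consistent with converting memory and disk to bytes and keeping times in seconds, replacing the previous mix of fractional GB/GiB factors. A minimal sketch of the new conversion arithmetic; the mapping entries are copied from the diffs, the sample job values are invented:

    # Per-attribute factors from the new unit_conversion_mapping below.
    unit_conversion_mapping = {
        "RequestMemory": 1024 * 1024,  # MiB -> bytes
        "RequestDisk": 1024,           # KiB -> bytes
        "MemoryUsage": 1000 * 1000,    # MB -> bytes
        "DiskUsage_RAW": 1024,         # KiB -> bytes
    }

    # Hypothetical ClassAd values: 2 GiB of memory, 1 GiB of disk requested.
    entry = {"RequestMemory": "2048", "RequestDisk": "1048576"}

    converted = {
        key: int(float(value) * unit_conversion_mapping.get(key, 1))
        for key, value in entry.items()
    }
    assert converted == {"RequestMemory": 2147483648, "RequestDisk": 1073741824}
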
lapis/job_io/htcondor.py: 26 changes (13 additions & 13 deletions)
@@ -17,20 +17,18 @@ def htcondor_job_reader(
     used_resource_name_mapping={  # noqa: B006
         "queuetime": "QDate",
         "walltime": "RemoteWallClockTime",  # s
         "cores": "Number of Allocated Processors",
         "memory": "MemoryUsage",  # MB
         "disk": "DiskUsage_RAW",  # KiB
     },
     unit_conversion_mapping={  # noqa: B006
         "RequestCpus": 1,
         "RequestWalltime": 1,
-        "RequestMemory": 1.024 / 1024,
-        "RequestDisk": 1.024 / 1024 / 1024,
+        "RequestMemory": 1024 * 1024,
+        "RequestDisk": 1024,
         "queuetime": 1,
         "RemoteWallClockTime": 1,
         "Number of Allocated Processors": 1,
-        "MemoryUsage": 1 / 1024,
-        "DiskUsage_RAW": 1.024 / 1024 / 1024,
+        "MemoryUsage": 1000 * 1000,
+        "DiskUsage_RAW": 1024,
     },
 ):
     input_file_type = iterable.name.split(".")[-1].lower()
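
The reader resolves each canonical resource name to an HTCondor attribute and then applies the per-attribute factor. A small sketch of that lookup chain, assuming the mappings above; the sample entry is invented:

    used_resource_name_mapping = {
        "walltime": "RemoteWallClockTime",  # s
        "memory": "MemoryUsage",            # MB
        "disk": "DiskUsage_RAW",            # KiB
    }
    unit_conversion_mapping = {"MemoryUsage": 1000 * 1000, "DiskUsage_RAW": 1024}

    # Invented job entry: 1 h walltime, 1500 MB memory, 2048 KiB disk.
    entry = {"RemoteWallClockTime": "3600", "MemoryUsage": "1500", "DiskUsage_RAW": "2048"}

    used = {
        key: int(float(entry[attr]) * unit_conversion_mapping.get(attr, 1))
        for key, attr in used_resource_name_mapping.items()
    }
    assert used == {"walltime": 3600, "memory": 1500000000, "disk": 2097152}
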
@@ -51,9 +49,10 @@ def htcondor_job_reader(
         resources = {}
         for key, original_key in resource_name_mapping.items():
             try:
-                resources[key] = float(
-                    entry[original_key]
-                ) * unit_conversion_mapping.get(original_key, 1)
+                resources[key] = int(
+                    float(entry[original_key])
+                    * unit_conversion_mapping.get(original_key, 1)
+                )
             except ValueError:
                 pass

@@ -62,13 +61,14 @@ def htcondor_job_reader(
                 (float(entry["RemoteSysCpu"]) + float(entry["RemoteUserCpu"]))
                 / float(entry[used_resource_name_mapping["walltime"]])
             )
-            * unit_conversion_mapping.get(used_resource_name_mapping["cores"], 1)
+            * unit_conversion_mapping.get(resource_name_mapping["cores"], 1)
         }
         for key in ["memory", "walltime", "disk"]:
             original_key = used_resource_name_mapping[key]
-            used_resources[key] = float(
-                entry[original_key]
-            ) * unit_conversion_mapping.get(original_key, 1)
+            used_resources[key] = int(
+                float(entry[original_key])
+                * unit_conversion_mapping.get(original_key, 1)
+            )

         try:
             resources["inputfiles"] = deepcopy(entry["Inputfiles"])
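
Unlike memory, disk, and walltime, the used cores value is derived rather than read: total CPU time (system plus user) divided by walltime, and in the hunk above it stays a float while the other used resources are truncated to int. A worked example with invented numbers:

    # 7200 s of CPU time over 3600 s of walltime -> on average 2 cores busy.
    entry = {"RemoteSysCpu": "1200.0", "RemoteUserCpu": "6000.0", "RemoteWallClockTime": "3600"}

    cores = (
        float(entry["RemoteSysCpu"]) + float(entry["RemoteUserCpu"])
    ) / float(entry["RemoteWallClockTime"])
    assert cores == 2.0  # kept as float; only memory, walltime, disk go through int()
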
lapis/job_io/swf.py: 34 changes (20 additions & 14 deletions)
@@ -12,18 +12,18 @@ def swf_job_reader(
     iterable,
     resource_name_mapping={  # noqa: B006
         "cores": "Requested Number of Processors",
-        "walltime": "Requested Time",
-        "memory": "Requested Memory",
+        "walltime": "Requested Time",  # s
+        "memory": "Requested Memory",  # KiB
     },
     used_resource_name_mapping={  # noqa: B006
-        "walltime": "Run Time",
+        "walltime": "Run Time",  # s
         "cores": "Number of Allocated Processors",
-        "memory": "Used Memory",
+        "memory": "Used Memory",  # KiB
         "queuetime": "Submit Time",
     },
     unit_conversion_mapping={  # noqa: B006
-        "Used Memory": 1 / 1024 / 1024,
-        "Requested Memory": 1 / 2114 / 1024,
+        "Used Memory": 1024,
+        "Requested Memory": 1024,
     },
 ):
     header = {
@@ -71,14 +71,20 @@
             )
             # handle memory
             key = "memory"
-            resources[key] = (
-                float(row[header[resource_name_mapping[key]]])
-                * float(row[header[resource_name_mapping["cores"]]])
-            ) * unit_conversion_mapping.get(resource_name_mapping[key], 1)
-            used_resources[key] = (
-                float(row[header[used_resource_name_mapping[key]]])
-                * float(row[header[used_resource_name_mapping["cores"]]])
-            ) * unit_conversion_mapping.get(used_resource_name_mapping[key], 1)
+            resources[key] = int(
+                (
+                    float(row[header[resource_name_mapping[key]]])
+                    * float(row[header[resource_name_mapping["cores"]]])
+                )
+                * unit_conversion_mapping.get(resource_name_mapping[key], 1)
+            )
+            used_resources[key] = int(
+                (
+                    float(row[header[used_resource_name_mapping[key]]])
+                    * float(row[header[used_resource_name_mapping["cores"]]])
+                )
+                * unit_conversion_mapping.get(used_resource_name_mapping[key], 1)
+            )
             yield Job(
                 resources=resources,
                 used_resources=used_resources,
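
The SWF trace format reports its memory fields per processor, which is why the reader multiplies the memory column by the processor column before applying the new KiB-to-bytes factor of 1024. A worked example with invented row values:

    # 2048 KiB per processor on 4 processors -> 8192 KiB total -> 8 MiB in bytes.
    memory_kib_per_proc = 2048.0  # "Requested Memory" column
    processors = 4.0              # "Requested Number of Processors" column

    total_bytes = int((memory_kib_per_proc * processors) * 1024)
    assert total_bytes == 8 * 1024 * 1024
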
lapis/pool_io/htcondor.py: 6 changes (3 additions & 3 deletions)
@@ -14,8 +14,8 @@ def htcondor_pool_reader(
     },
     unit_conversion_mapping: dict = {  # noqa: B006
         "TotalSlotCPUs": 1,
-        "TotalSlotDisk": 1.024 / 1024,
-        "TotalSlotMemory": 1.024 / 1024,
+        "TotalSlotDisk": 1024 * 1024,
+        "TotalSlotMemory": 1024 * 1024,
     },
     pool_type: Callable = Pool,
     make_drone: Callable = None,
@@ -44,7 +44,7 @@ def htcondor_pool_reader(
             make_drone=partial(
                 make_drone,
                 {
-                    key: float(row[value]) * unit_conversion_mapping.get(value, 1)
+                    key: int(float(row[value]) * unit_conversion_mapping.get(value, 1))
                     for key, value in resource_name_mapping.items()
                 },
                 ignore_resources=["disk"],
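
Each row of the pool file becomes one drone whose capacities are now ints in base units. A sketch of the comprehension above; the resource_name_mapping dict is assumed (it sits outside the shown hunk) and the row values are invented:

    unit_conversion_mapping = {
        "TotalSlotCPUs": 1,
        "TotalSlotDisk": 1024 * 1024,
        "TotalSlotMemory": 1024 * 1024,
    }
    # Assumed canonical-name mapping; the actual dict is above the shown hunk.
    resource_name_mapping = {"cores": "TotalSlotCPUs", "disk": "TotalSlotDisk", "memory": "TotalSlotMemory"}
    row = {"TotalSlotCPUs": "8", "TotalSlotDisk": "100", "TotalSlotMemory": "16"}  # invented pool row

    resources = {
        key: int(float(row[value]) * unit_conversion_mapping.get(value, 1))
        for key, value in resource_name_mapping.items()
    }
    assert resources == {"cores": 8, "disk": 104857600, "memory": 16777216}
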
lapis/pool_io/machines.py: 6 changes (5 additions & 1 deletion)
@@ -11,6 +11,10 @@ def machines_pool_reader(
         "cores": "CPUs_per_node",
         "memory": "RAM_per_node_in_KB",
     },
+    unit_conversion_mapping={  # noqa: B006
+        "CPUs_per_node": 1,
+        "RAM_per_node_in_KB": 1000,
+    },
     pool_type: Callable = Pool,
     make_drone: Callable = None,
 ):
@@ -33,7 +37,7 @@ def machines_pool_reader(
             make_drone=partial(
                 make_drone,
                 {
-                    key: float(row[value])
+                    key: int(float(row[value]) * unit_conversion_mapping.get(value, 1))
                     for key, value in resource_name_mapping.items()
                 },
             ),
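
Note the factor of 1000 rather than 1024: the new mapping treats the RAM_per_node_in_KB column as decimal kilobytes when converting to bytes. A quick check with an invented row:

    row = {"CPUs_per_node": "16", "RAM_per_node_in_KB": "67108864"}  # invented machine row
    unit_conversion_mapping = {"CPUs_per_node": 1, "RAM_per_node_in_KB": 1000}

    memory_bytes = int(float(row["RAM_per_node_in_KB"]) * unit_conversion_mapping["RAM_per_node_in_KB"])
    assert memory_bytes == 67108864000
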
