Skip to content

Commit fda405f

Browse files
committed
[gen/wiki] Group clusters by queue to make things easier to read
1 parent 0b4c25c commit fda405f

File tree

1 file changed

+49
-45
lines changed

1 file changed

+49
-45
lines changed

lib/refrepo/gen/wiki/generators/site_hardware.rb

Lines changed: 49 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -108,55 +108,59 @@ def self.generate_description(site)
108108
site_accelerators += cluster_hash.select { |k, v| v['accelerators'] != '' }.count
109109
}
110110

111-
hardware[site].sort.to_h.each { |cluster_uid, cluster_hash|
112-
subclusters = cluster_hash.keys.count != 1
113-
cluster_nodes = cluster_hash.keys.flatten.count
114-
cluster_cpus = cluster_hash.map { |k, v| k.count * v['cpus_per_node'] }.reduce(:+)
115-
cluster_cores = cluster_hash.map { |k, v| k.count * v['cpus_per_node'] * v['cores_per_cpu'] }.reduce(:+)
116-
queue_str = cluster_hash.map { |k, v| v['queue_str']}.first
117-
access_conditions = []
118-
access_conditions << queue_str if queue_str != ''
119-
access_conditions << "exotic job type" if cluster_hash.map { |k, v| v['exotic']}.first
120-
table_columns = ['Cluster', 'Queue', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + (site_accelerators.zero? ? [] : ['Accelerators'])
121-
122-
text_data << ["\n== #{cluster_uid} ==\n"]
123-
text_data << ["'''#{cluster_nodes} #{G5K.pluralize(cluster_nodes, 'node')}, #{cluster_cpus} #{G5K.pluralize(cluster_cpus, 'cpu')}, #{cluster_cores} #{G5K.pluralize(cluster_cores, 'core')}" + (subclusters == true ? ",''' split as follows due to differences between nodes " : "''' ") + "([https://public-api.grid5000.fr/stable/sites/#{site}/clusters/#{cluster_uid}/nodes.json?pretty=1 json])"]
124-
125-
cluster_hash.sort.to_h.each_with_index { |(num, h), i|
126-
if subclusters
127-
subcluster_nodes = num.count
128-
subcluster_cpus = subcluster_nodes * h['cpus_per_node']
129-
subcluster_cores = subcluster_nodes * h['cpus_per_node'] * h['cores_per_cpu']
130-
text_data << "<hr style=\"height:10pt; visibility:hidden;\" />\n" if i != 0 # smaller vertical <br />
131-
text_data << ["; #{cluster_uid}-#{G5K.nodeset(num)} (#{subcluster_nodes} #{G5K.pluralize(subcluster_nodes, 'node')}, #{subcluster_cpus} #{G5K.pluralize(subcluster_cpus, 'cpu')}, #{subcluster_cores} #{G5K.pluralize(subcluster_cores, 'core')})"]
132-
end
111+
# Group by queue
112+
# Alphabetic ordering of queue names matches what we want: "default" < "production" < "testing"
113+
hardware[site].group_by { |cluster_uid, cluster_hash| cluster_hash.map { |k, v| v['queue']}.first }.sort.each { |queue, clusters|
114+
queue = (queue.nil? || queue.empty?) ? 'default' : queue
115+
text_data << "\n= Clusters in #{queue} queue ="
116+
clusters.sort.to_h.each { |cluster_uid, cluster_hash|
117+
subclusters = cluster_hash.keys.count != 1
118+
cluster_nodes = cluster_hash.keys.flatten.count
119+
cluster_cpus = cluster_hash.map { |k, v| k.count * v['cpus_per_node'] }.reduce(:+)
120+
cluster_cores = cluster_hash.map { |k, v| k.count * v['cpus_per_node'] * v['cores_per_cpu'] }.reduce(:+)
121+
queue_str = cluster_hash.map { |k, v| v['queue_str']}.first
122+
access_conditions = []
123+
access_conditions << queue_str if queue_str != ''
124+
access_conditions << "exotic job type" if cluster_hash.map { |k, v| v['exotic']}.first
125+
table_columns = ['Cluster', 'Queue', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + (site_accelerators.zero? ? [] : ['Accelerators'])
126+
127+
text_data << ["\n== #{cluster_uid} ==\n"]
128+
text_data << ["'''#{cluster_nodes} #{G5K.pluralize(cluster_nodes, 'node')}, #{cluster_cpus} #{G5K.pluralize(cluster_cpus, 'cpu')}, #{cluster_cores} #{G5K.pluralize(cluster_cores, 'core')}" + (subclusters == true ? ",''' split as follows due to differences between nodes " : "''' ") + "([https://public-api.grid5000.fr/stable/sites/#{site}/clusters/#{cluster_uid}/nodes.json?pretty=1 json])"]
129+
130+
cluster_hash.sort.to_h.each_with_index { |(num, h), i|
131+
if subclusters
132+
subcluster_nodes = num.count
133+
subcluster_cpus = subcluster_nodes * h['cpus_per_node']
134+
subcluster_cores = subcluster_nodes * h['cpus_per_node'] * h['cores_per_cpu']
135+
text_data << "<hr style=\"height:10pt; visibility:hidden;\" />\n" if i != 0 # smaller vertical <br />
136+
text_data << ["; #{cluster_uid}-#{G5K.nodeset(num)} (#{subcluster_nodes} #{G5K.pluralize(subcluster_nodes, 'node')}, #{subcluster_cpus} #{G5K.pluralize(subcluster_cpus, 'cpu')}, #{subcluster_cores} #{G5K.pluralize(subcluster_cores, 'core')})"]
137+
end
133138

134-
accelerators = nil
135-
if h['gpu_str'] != '' && h['mic_str'] != ''
136-
accelerators = 'GPU/Xeon Phi'
137-
elsif h['gpu_str'] != ''
138-
accelerators = 'GPU'
139-
elsif h['mic_str'] != ''
140-
accelerators = 'Xeon Phi'
141-
end
142-
hash = {}
143-
hash['Access condition'] = access_conditions.join(", ") if not access_conditions.empty?
144-
hash.merge!({
145-
'Model' => h['model'],
146-
'Date of arrival' => h['date'],
147-
'CPU' => h['processor_description'],
148-
'Memory' => h['ram_size'] + (!h['pmem_size'].nil? ? " + #{h['pmem_size']} [[PMEM]]" : ''),
149-
'Storage' => h['storage_description'],
150-
'Network' => h['network_description'],
151-
})
152-
hash[accelerators] = h['accelerators_long'] if accelerators
153-
text_data << MW::generate_hash_table(hash)
139+
accelerators = nil
140+
if h['gpu_str'] != '' && h['mic_str'] != ''
141+
accelerators = 'GPU/Xeon Phi'
142+
elsif h['gpu_str'] != ''
143+
accelerators = 'GPU'
144+
elsif h['mic_str'] != ''
145+
accelerators = 'Xeon Phi'
146+
end
147+
hash = {}
148+
hash['Access condition'] = access_conditions.join(", ") if not access_conditions.empty?
149+
hash.merge!({
150+
'Model' => h['model'],
151+
'Date of arrival' => h['date'],
152+
'CPU' => h['processor_description'],
153+
'Memory' => h['ram_size'] + (!h['pmem_size'].nil? ? " + #{h['pmem_size']} [[PMEM]]" : ''),
154+
'Storage' => h['storage_description'],
155+
'Network' => h['network_description'],
156+
})
157+
hash[accelerators] = h['accelerators_long'] if accelerators
158+
text_data << MW::generate_hash_table(hash)
159+
}
154160
}
155161
}
156162

157-
generated_content = "\n= Cluster details =\n"
158-
generated_content += text_data.join("\n")
159-
return generated_content
163+
return text_data.join("\n")
160164
end
161165
end
162166

0 commit comments

Comments (0)