Skip to content

Commit

Permalink
[gen/wiki] Fix wiki links to exotic clusters by simplifying hardware …
Browse files Browse the repository at this point in the history
…page headings

The page heading for each cluster currently contains the queue and the job
type, for instance:

== pyxis (exotic job type) ==
== graoully (production queue) ==
== drac (testing queue, exotic job type) ==

It means that anchor links are horrible, because they are automatically
generated by Mediawiki.  For instance:

  https://www.grid5000.fr/w/Nancy:Hardware#graoully_.28production_queue.29

We try to generate these horrible links in several places in the wiki
generation code, which is difficult.  In addition, it fails to take into
account the exotic job type, so all links to exotic clusters are currently
broken.

Fix this by only keeping the cluster name in the heading: this way, we
don't need to guess the horrible link anchors generated by Mediawiki.
The missing information (queue and job type, i.e. the "access condition") is
moved to the table presenting the cluster characteristics.
  • Loading branch information
jonglezb committed Dec 3, 2020
1 parent 33a2391 commit d75119d
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 10 deletions.
8 changes: 3 additions & 5 deletions lib/refrepo/gen/wiki/generators/hardware.rb
Expand Up @@ -381,9 +381,7 @@ def generate_storage
((!d['reservation'].nil? && d['reservation']) ? '[[Disk_reservation|*]]' : '')
}.join(', ') + ")"
end
queues = cluster_hash['queues'] - ['admin', 'default']
queue_t = (queues.nil? || (queues.empty? ? '' : "_.28" + queues[0].gsub(' ', '_') + ' queue.29'))
nodes_data << { 'uid' => node_uid, 'data' => { 'main' => maindisk_t, 'hdd' => hdd_t, 'ssd' => ssd_t, 'reservation' => reservable_disks, 'queue' => queue_t } }
nodes_data << { 'uid' => node_uid, 'data' => { 'main' => maindisk_t, 'hdd' => hdd_t, 'ssd' => ssd_t, 'reservation' => reservable_disks } }
end
nd = nodes_data.group_by { |d| d['data'] }
nd.each do |data, nodes|
Expand All @@ -396,7 +394,7 @@ def generate_storage
end
table_data << [
"[[#{site_uid.capitalize}:Hardware|#{site_uid.capitalize}]]",
"[[#{site_uid.capitalize}:Hardware##{cluster_uid}#{data['queue']}|#{nodesetname}]]",
"[[#{site_uid.capitalize}:Hardware##{cluster_uid}|#{nodesetname}]]",
nodes.length,
data['main'],
data['hdd'],
Expand Down Expand Up @@ -445,7 +443,7 @@ def generate_interfaces
network_interfaces.sort.to_h.each { |num, interfaces|
table_data << [
"[[#{site_uid.capitalize}:Network|#{site_uid.capitalize}]]",
"[[#{site_uid.capitalize}:Hardware##{cluster_uid}" + (interfaces['queues'] == '' ? '' : "_.28#{queues.gsub(' ', '_')}.29") + "|#{cluster_uid}" + (network_interfaces.size==1 ? '' : '-' + G5K.nodeset(num)) + "]]",
"[[#{site_uid.capitalize}:Hardware##{cluster_uid}" + "|#{cluster_uid}" + (network_interfaces.size==1 ? '' : '-' + G5K.nodeset(num)) + "]]",
num.count,
interfaces['25g_count'].zero? ? '' : interfaces['25g_count'],
interfaces['10g_count'].zero? ? '' : interfaces['10g_count'],
Expand Down
11 changes: 6 additions & 5 deletions lib/refrepo/gen/wiki/generators/site_hardware.rb
Expand Up @@ -77,14 +77,13 @@ def self.generate_summary_data(site, with_sites)
hardware[site].sort.to_h.each { |cluster_uid, cluster_hash|
cluster_nodes = cluster_hash.keys.flatten.count
queue = cluster_hash.map { |k, v| v['queue']}.first
queue_str = cluster_hash.map { |k, v| v['queue_str']}.first
access_conditions = []
access_conditions << "<b>#{queue}</b>&nbsp;queue" if queue != ''
access_conditions << '<b>exotic</b>&nbsp;job&nbsp;type' if cluster_hash.map { |k, v| v['exotic']}.first
table_columns = (with_sites == true ? ['Site'] : []) + ['Cluster', 'Access Condition', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + ((site_accelerators.zero? && with_sites == false) ? [] : ['Accelerators'])
data = partition(cluster_hash)
table_data << (with_sites == true ? ["[[#{site.capitalize}:Hardware|#{site.capitalize}]]"] : []) + [
(with_sites == true ? "[[#{site.capitalize}:Hardware##{cluster_uid}" + (queue_str == '' ? '' : "_.28#{queue_str.gsub(' ', '_')}.29") + "|#{cluster_uid}]]" : "[[##{cluster_uid}" + (queue_str == '' ? '' : "_.28#{queue_str.gsub(' ', '_')}.29") + "|#{cluster_uid}]]"),
(with_sites == true ? "[[#{site.capitalize}:Hardware##{cluster_uid}" + "|#{cluster_uid}]]" : "[[##{cluster_uid}" + "|#{cluster_uid}]]"),
access_conditions.join(",<br/>"),
cell_data(data, 'date'),
cluster_nodes,
Expand Down Expand Up @@ -120,7 +119,7 @@ def self.generate_description(site)
access_conditions << "exotic job type" if cluster_hash.map { |k, v| v['exotic']}.first
table_columns = ['Cluster', 'Queue', 'Date of arrival', { attributes: 'data-sort-type="number"', text: 'Nodes' }, 'CPU', { attributes: 'data-sort-type="number"', text: 'Cores' }, { attributes: 'data-sort-type="number"', text: 'Memory' }, { attributes: 'data-sort-type="number"', text: 'Storage' }, { attributes: 'data-sort-type="number"', text: 'Network' }] + (site_accelerators.zero? ? [] : ['Accelerators'])

text_data << ["\n== #{cluster_uid}" + (access_conditions.empty? ? '' : " (#{access_conditions.join(", ")})") + " ==\n"]
text_data << ["\n== #{cluster_uid} ==\n"]
text_data << ["'''#{cluster_nodes} #{G5K.pluralize(cluster_nodes, 'node')}, #{cluster_cpus} #{G5K.pluralize(cluster_cpus, 'cpu')}, #{cluster_cores} #{G5K.pluralize(cluster_cores, 'core')}" + (subclusters == true ? ",''' split as follows due to differences between nodes " : "''' ") + "([https://public-api.grid5000.fr/stable/sites/#{site}/clusters/#{cluster_uid}/nodes.json?pretty=1 json])"]

cluster_hash.sort.to_h.each_with_index { |(num, h), i|
Expand All @@ -140,14 +139,16 @@ def self.generate_description(site)
elsif h['mic_str'] != ''
accelerators = 'Xeon Phi'
end
hash = {
hash = {}
hash['Access condition'] = access_conditions.join(", ") if not access_conditions.empty?
hash.merge!({
'Model' => h['model'],
'Date of arrival' => h['date'],
'CPU' => h['processor_description'],
'Memory' => h['ram_size'] + (!h['pmem_size'].nil? ? " + #{h['pmem_size']} [[PMEM]]" : ''),
'Storage' => h['storage_description'],
'Network' => h['network_description'],
}
})
hash[accelerators] = h['accelerators_long'] if accelerators
text_data << MW::generate_hash_table(hash)
}
Expand Down

0 comments on commit d75119d

Please sign in to comment.