Skip to content

Commit

Permalink
Merge pull request #1054 from valsdav/fix-rucio-pnf-slash
Browse files Browse the repository at this point in the history
fix: Do not add trailing slash in xrootd urls
  • Loading branch information
lgray committed Mar 13, 2024
2 parents a045380 + 644cead commit 39403c7
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 4 deletions.
4 changes: 3 additions & 1 deletion src/coffea/dataset_tools/dataset_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -352,7 +352,9 @@ def do_replicas(self, mode=None, selection=None):
ind = list(
map(
int,
Prompt.ask("Enter list of sites index to be used").split(" "),
Prompt.ask(
"Enter list of sites index to be used", default="0"
).split(" "),
)
)
sites_to_use = [list(sorted_sites.keys())[i] for i in ind]
Expand Down
17 changes: 14 additions & 3 deletions src/coffea/dataset_tools/rucio_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import os
import re
import subprocess
import time
from collections import defaultdict

from rucio.client import Client
Expand Down Expand Up @@ -64,8 +65,16 @@ def get_xrootd_sites_map():
This function returns the list of xrootd prefix rules for each site.
"""
sites_xrootd_access = defaultdict(dict)
# TODO Do not rely on local sites_map cache. Just reload it?
if not os.path.exists(".sites_map.json"):
# Check if the cache file has been modified in the last 10 minutes
cache_valid = False
if os.path.exists(".sites_map.json"):
file_time = os.path.getmtime(".sites_map.json")
current_time = time.time()
ten_minutes_ago = current_time - 600
if file_time > ten_minutes_ago:
cache_valid = True

if not os.path.exists(".sites_map.json") or not cache_valid:
print("Loading SITECONF info")
sites = [
(s, "/cvmfs/cms.cern.ch/SITECONF/" + s + "/storage.json")
Expand Down Expand Up @@ -96,6 +105,7 @@ def get_xrootd_sites_map():
)
else:
sites_xrootd_access[site["rse"]] = proc["prefix"]

json.dump(sites_xrootd_access, open(".sites_map.json", "w"))

return json.load(open(".sites_map.json"))
Expand All @@ -114,7 +124,8 @@ def _get_pfn_for_site(path, rules):
pfn = pfn.replace(f"${i+1}", grs[i])
return pfn
else:
return rules + "/" + path
# join with exactly one slash: strip the path's leading slash (if any) to avoid a doubled slash
return rules + "/" + path.removeprefix("/")


def get_dataset_files_replicas(
Expand Down

0 comments on commit 39403c7

Please sign in to comment.