Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions lib/common/interface/mysqlstore.py
Original file line number Diff line number Diff line change
Expand Up @@ -523,6 +523,14 @@ def _do_load_files(self, dataset): #override

dataset.files.add(lfile)

def _do_check_if_on(self, datasetname, sitename): #override
query = 'SELECT COUNT(*) FROM `datasets` AS d'
query += ' INNER JOIN `dataset_replicas` AS dr ON dr.`dataset_id` = d.`id`'
query += ' INNER JOIN `sites` AS s ON s.`id` = dr.`site_id`'
query += ' WHERE d.`name` = %s and s.`name` = %s'

return (self._mysql.query(query, datasetname, sitename)[0] != 0)

def _do_find_block_of(self, fullpath, datasets): #override
query = 'SELECT d.`name`, b.`name` FROM `files` AS f'
query += ' INNER JOIN `datasets` AS d ON d.`id` = f.`dataset_id`'
Expand Down
9 changes: 9 additions & 0 deletions lib/common/interface/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,6 +268,15 @@ def load_files(self, dataset):
finally:
self.release_lock()

def check_if_on(self, datasetname, sitename):
    """
    Return True if a replica of the given dataset exists at the given site,
    False otherwise. Delegates to the backend-specific implementation.
    """

    logger.debug('_do_check_if_on()')

    result = self._do_check_if_on(datasetname, sitename)
    return result

def find_block_of(self, fullpath, datasets):
"""
Return the Block object for the given file.
Expand Down
1 change: 1 addition & 0 deletions lib/detox/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,6 @@
activity_indicator = '/home/cmsprod/public_html/IntelROCCS/Detox/inActionLock.txt',
deletion_per_iteration = 0.01, # fraction of quota to delete per iteration
deletion_volume_per_request = 50, # size to delete per deletion request in TB
exclude_if_on = ['T1_IT_CNAF_MSS'], # if a dataset has a replica on these [sites], don't consider it for deletions. Introduced because of the CNAF incident.
time_shift = 0. # number of days in the future from which to evaluate the policies
)
10 changes: 10 additions & 0 deletions lib/detox/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,16 @@ def _execute_policy(self, policy, is_test, comment):
# will also select out replicas on sites with quotas
all_replicas = policy.partition_replicas(self.inventory_manager, target_sites)

# Check if the replica is present on other site(s) that trigger exclusion from possible deletion.
# Possible use case: a tape site has had a water incident, for example.
# Communication with the database is needed because we do not have all (tape) replicas in memory.
if len(detox_config.main.exclude_if_on) > 0:
for replica in all_replicas:
ds_name = replica.dataset.name
for sitename in detox_config.main.exclude_if_on:
if self.inventory_manager.store.check_if_on(ds_name,sitename):
replica.dataset.demand['on_protected_site'] = True

logger.info('Saving site and dataset states.')

# update site and dataset lists
Expand Down
11 changes: 11 additions & 0 deletions lib/detox/variables.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,16 @@ def _get(self, dataset):
except KeyError:
return 0.

class DatasetOnProtectedSite(DatasetAttr):
    """
    Boolean dataset attribute: True when the dataset's demand record carries
    the 'on_protected_site' flag (set when a replica exists on an excluded site).
    """

    def __init__(self):
        DatasetAttr.__init__(self, Attr.BOOL_TYPE)

    def _get(self, dataset):
        # A missing flag means the dataset was never marked as protected.
        return dataset.demand.get('on_protected_site', False)

class ReplicaSize(DatasetReplicaAttr):
def __init__(self):
DatasetReplicaAttr.__init__(self, Attr.NUMERIC_TYPE)
Expand Down Expand Up @@ -272,6 +282,7 @@ def _get(self, site):
'dataset.usage_rank': DatasetUsageRank(),
'dataset.demand_rank': DatasetDemandRank(),
'dataset.release': DatasetRelease(),
'dataset.on_protected_site': DatasetOnProtectedSite(),
'replica.is_last_transfer_source': ReplicaIsLastSource(),
'replica.size': ReplicaSize(),
'replica.incomplete': ReplicaIncomplete(),
Expand Down