diff --git a/etc/pip-requires b/etc/pip-requires index db1845fc94..9d9e4bffb0 100644 --- a/etc/pip-requires +++ b/etc/pip-requires @@ -1,7 +1,7 @@ # All dependencies needed to run rucio should be defined here -SQLAlchemy==1.3.7 # DB backend -alembic==1.4.1 # Lightweight database migration tool for SQLAlchemy +SQLAlchemy==1.3.20 # DB backend +alembic==1.4.3 # Lightweight database migration tool for SQLAlchemy web.py==0.39; python_version <= '2.7' # Python web framework for Python2 web.py==0.40; python_version > '2.7' # Python web framework for Python3 python-memcached==1.59; python_version <= '2.7' # Quick and small memcached client for Python2 diff --git a/lib/rucio/api/account.py b/lib/rucio/api/account.py index 2a4b3fae06..5d7c28d1c4 100644 --- a/lib/rucio/api/account.py +++ b/lib/rucio/api/account.py @@ -1,22 +1,28 @@ -# Copyright European Organization for Nuclear Research (CERN) +# -*- coding: utf-8 -*- +# Copyright 2012-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at # -# Authors: -# - Vincent Garonne, , 2011-2013 -# - Angelos Molfetas, , 2011 -# - Thomas Beermann, , 2012 -# - Mario Lassnig, , 2012 -# - Martin Barisits, , 2014 -# - Joaquin Bogado, , 2015 -# - Cedric Serfon, , 2015-2019 -# - Hannes Hansen, , 2018 -# - Andrew Lister, , 2019 -# - Patrick Austin, , 2020 +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# -# PY3K COMPATIBLE +# Authors: +# - Mario Lassnig , 2012-2020 +# - Vincent Garonne , 2012-2015 +# - Thomas Beermann , 2012 +# - Martin Barisits , 2014 +# - Joaquín Bogado , 2015 +# - Cedric Serfon , 2015-2019 +# - Hannes Hansen , 2018 +# - Andrew Lister , 2019 +# - Patrick Austin , 2020 import rucio.api.permission import rucio.common.exception @@ -51,7 +57,7 @@ def add_account(account, type, email, issuer, vo='def'): account = InternalAccount(account, vo=vo) - account_core.add_account(account, AccountType.from_sym(type), email) + account_core.add_account(account, AccountType[type.upper()], email) def del_account(account, issuer, vo='def'): diff --git a/lib/rucio/api/did.py b/lib/rucio/api/did.py index bf7b8b28ec..9b388d39c1 100644 --- a/lib/rucio/api/did.py +++ b/lib/rucio/api/did.py @@ -17,7 +17,7 @@ # - Vincent Garonne , 2013-2017 # - Cedric Serfon , 2013-2020 # - Ralph Vigne , 2013 -# - Mario Lassnig , 2013-2015 +# - Mario Lassnig , 2013-2020 # - Yun-Pin Sun , 2013 # - Thomas Beermann , 2013 # - Martin Barisits , 2014-2020 @@ -155,9 +155,9 @@ def add_did(scope, name, type, issuer, account=None, statuses={}, meta={}, rules raise rucio.common.exception.InvalidObject("Provided metadata %s doesn't match the naming convention: %s != %s" % (k, meta[k], extra_meta[k])) # Validate metadata - meta_core.validate_meta(meta=meta, did_type=DIDType.from_sym(type)) + meta_core.validate_meta(meta=meta, did_type=DIDType[type.upper()]) - return did.add_did(scope=scope, name=name, type=DIDType.from_sym(type), account=account or issuer, + return did.add_did(scope=scope, name=name, type=DIDType[type.upper()], account=account or issuer, statuses=statuses, meta=meta, rules=rules, lifetime=lifetime, dids=dids, rse_id=rse_id) @@ -295,7 +295,7 @@ def list_new_dids(type=None, thread=None, total_threads=None, chunk_size=1000, v :param chunk_size: Number of requests to return per yield. :param vo: The VO to act on. """ - dids = did.list_new_dids(did_type=type and DIDType.from_sym(type), thread=thread, total_threads=total_threads, chunk_size=chunk_size) + dids = did.list_new_dids(did_type=type and DIDType[type.upper()], thread=thread, total_threads=total_threads, chunk_size=chunk_size) for d in dids: if d['scope'].vo == vo: yield api_update_return_dict(d) diff --git a/lib/rucio/api/identity.py b/lib/rucio/api/identity.py index 603d54b2e8..4dd2d48147 100644 --- a/lib/rucio/api/identity.py +++ b/lib/rucio/api/identity.py @@ -1,25 +1,32 @@ -# Copyright European Organization for Nuclear Research (CERN) +# -*- coding: utf-8 -*- +# Copyright 2012-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# # Authors: -# - Vincent Garonne, , 2012 -# - Mario Lassnig, , 2012, 2017 -# - Tomas Kouba, , 2014 -# - Thomas Beermann, , 2014 -# - Hannes Hansen, , 2019 -# - Andrew Lister, , 2019 +# - Mario Lassnig , 2012-2020 +# - Vincent Garonne , 2012-2015 +# - Tomáš Kouba , 2014 +# - Thomas Beermann , 2014 +# - Martin Barisits , 2017 +# - Hannes Hansen , 2018-2019 +# - Andrew Lister , 2019 # - Ruturaj Gujar , 2019 -# -# PY3K COMPATIBLE """ Interface for identity abstraction layer """ - from rucio.api import permission from rucio.common import exception from rucio.common.types import InternalAccount @@ -36,7 +43,7 @@ def add_identity(identity_key, id_type, email, password=None): :param email: The Email address associated with the identity. :param password: If type==userpass, this sets the password. """ - return identity.add_identity(identity_key, IdentityType.from_sym(id_type), email, password=password) + return identity.add_identity(identity_key, IdentityType[id_type.upper()], email, password=password) def del_identity(identity_key, id_type, issuer, vo='def'): @@ -47,7 +54,7 @@ def del_identity(identity_key, id_type, issuer, vo='def'): :param issuer: The issuer account. :param vo: the VO of the issuer. """ - id_type = IdentityType.from_sym(id_type) + id_type = IdentityType[id_type.upper()] kwargs = {'accounts': identity.list_accounts_for_identity(identity_key, id_type)} if not permission.has_permission(issuer=issuer, vo=vo, action='del_identity', kwargs=kwargs): raise exception.AccessDenied('Account %s can not delete identity' % (issuer)) @@ -74,7 +81,7 @@ def add_account_identity(identity_key, id_type, account, email, issuer, default= account = InternalAccount(account, vo=vo) - return identity.add_account_identity(identity=identity_key, type=IdentityType.from_sym(id_type), default=default, email=email, account=account, password=password) + return identity.add_account_identity(identity=identity_key, type=IdentityType[id_type.upper()], default=default, email=email, account=account, password=password) def del_account_identity(identity_key, id_type, account, issuer, vo='def'): @@ -93,7 +100,7 @@ def del_account_identity(identity_key, id_type, account, issuer, vo='def'): account = InternalAccount(account, vo=vo) - return identity.del_account_identity(identity_key, IdentityType.from_sym(id_type), account) + return identity.del_account_identity(identity_key, IdentityType[id_type.upper()], account) def list_identities(**kwargs): @@ -112,7 +119,7 @@ def get_default_account(identity_key, id_type): :param identity_key: The identity key name. For example x509 DN, or a username. :param id_type: The type of the authentication (x509, gss, userpass, ssh, saml). """ - account = identity.get_default_account(identity_key, IdentityType.from_sym(id_type)) + account = identity.get_default_account(identity_key, IdentityType[id_type.upper()]) return account.external @@ -125,5 +132,5 @@ def list_accounts_for_identity(identity_key, id_type): returns: A list of all accounts for the identity. """ - accounts = identity.list_accounts_for_identity(identity_key, IdentityType.from_sym(id_type)) + accounts = identity.list_accounts_for_identity(identity_key, IdentityType[id_type.upper()]) return [account.external for account in accounts] diff --git a/lib/rucio/api/vo.py b/lib/rucio/api/vo.py index ad3350bb61..d87f2f629a 100644 --- a/lib/rucio/api/vo.py +++ b/lib/rucio/api/vo.py @@ -1,4 +1,5 @@ -# Copyright 2019 CERN for the benefit of the ATLAS collaboration. 
+# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +16,7 @@ # Authors: # - Andrew Lister , 2019 # - Patrick Austin , 2020 - +# - Mario Lassnig , 2020 from rucio.api.permission import has_permission from rucio.common import exception @@ -79,7 +80,7 @@ def recover_vo_root_identity(root_vo, identity_key, id_type, email, issuer, defa account = InternalAccount('root', vo=root_vo) - return identity.add_account_identity(identity=identity_key, type=IdentityType.from_sym(id_type), default=default, email=email, account=account, password=password) + return identity.add_account_identity(identity=identity_key, type=IdentityType[id_type.upper()], default=default, email=email, account=account, password=password) def update_vo(updated_vo, parameters, issuer, vo='def'): diff --git a/lib/rucio/common/utils.py b/lib/rucio/common/utils.py index 3ab8d09592..a76f5959f3 100644 --- a/lib/rucio/common/utils.py +++ b/lib/rucio/common/utils.py @@ -53,6 +53,10 @@ import threading import time import zlib + +from enum import Enum +from logging import getLogger, Formatter +from logging.handlers import RotatingFileHandler from uuid import uuid4 as uuid from xml.etree import ElementTree from logging.handlers import RotatingFileHandler @@ -92,12 +96,6 @@ # Extra modules: Only imported if available EXTRA_MODULES = {'paramiko': False} -try: - from rucio.db.sqla.enum import EnumSymbol - EXTRA_MODULES['rucio.db.sqla.enum'] = True -except ImportError: - EXTRA_MODULES['rucio.db.sqla.enum'] = False - for extra_module in EXTRA_MODULES: try: imp.find_module(extra_module) @@ -406,8 +404,8 @@ def default(self, obj): # pylint: disable=E0202 return obj.isoformat() elif isinstance(obj, datetime.timedelta): return obj.days * 24 * 60 * 60 + obj.seconds - elif isinstance(obj, EnumSymbol): - return obj.description + elif isinstance(obj, Enum): + return obj.name elif isinstance(obj, (InternalAccount, InternalScope)): return obj.external return json.JSONEncoder.default(self, obj) diff --git a/lib/rucio/core/account.py b/lib/rucio/core/account.py index b258bd9c9e..3f2b0f1e45 100644 --- a/lib/rucio/core/account.py +++ b/lib/rucio/core/account.py @@ -1,4 +1,5 @@ -# Copyright 2012-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2012-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,11 +16,11 @@ # Authors: # - Thomas Beermann , 2012 # - Angelos Molfetas , 2012 -# - Mario Lassnig , 2012-2019 -# - Vincent Garonne , 2012-2015 +# - Mario Lassnig , 2012-2020 +# - Vincent Garonne , 2012-2015 # - Martin Barisits , 2014 # - Cedric Serfon , 2014-2019 -# - Joaquin Bogado , 2015 +# - Joaquín Bogado , 2015 # - Hannes Hansen , 2018-2019 # - Andrew Lister , 2019 # - Patrick Austin , 2020 @@ -27,8 +28,8 @@ # # PY3K COMPATIBLE - from datetime import datetime +from enum import Enum from re import match from traceback import format_exc @@ -44,7 +45,6 @@ from rucio.core.vo import vo_exists from rucio.db.sqla import models from rucio.db.sqla.constants import AccountStatus, AccountType -from rucio.db.sqla.enum import EnumSymbol from rucio.db.sqla.session import read_session, transactional_session, stream_session from six import string_types @@ -143,7 +143,7 @@ def update_account(account, key, value, session=None): raise exception.AccountNotFound('Account with ID \'%s\' cannot be found' % account) if key == 'status': if isinstance(value, string_types): - value = AccountStatus.from_sym(value) + value = AccountStatus[value] if value == AccountStatus.SUSPENDED: query.update({'status': value, 'suspended_at': datetime.utcnow()}) elif value == AccountStatus.ACTIVE: @@ -166,8 +166,8 @@ def list_accounts(filter={}, session=None): for filter_type in filter: if filter_type == 'account_type': if isinstance(filter['account_type'], string_types): - query = query.filter_by(account_type=AccountType.from_sym(filter['account_type'])) - elif isinstance(filter['account_type'], EnumSymbol): + query = query.filter_by(account_type=AccountType[filter['account_type']]) + elif isinstance(filter['account_type'], Enum): query = query.filter_by(account_type=filter['account_type']) elif filter_type == 'identity': diff --git a/lib/rucio/core/did.py b/lib/rucio/core/did.py index 17076b88ca..27e001d1b7 100644 --- a/lib/rucio/core/did.py +++ b/lib/rucio/core/did.py @@ -18,7 +18,7 @@ # - Martin Barisits , 2013-2020 # - Cedric Serfon , 2013-2020 # - Ralph Vigne , 2013 -# - Mario Lassnig , 2013-2019 +# - Mario Lassnig , 2013-2020 # - Yun-Pin Sun , 2013 # - Thomas Beermann , 2013-2018 # - Joaquín Bogado , 2014-2015 @@ -39,6 +39,7 @@ import random import sys from datetime import datetime, timedelta +from enum import Enum from hashlib import md5 from re import match @@ -60,7 +61,6 @@ from rucio.core.naming_convention import validate_name from rucio.db.sqla import models, filter_thread_work from rucio.db.sqla.constants import DIDType, DIDReEvaluation, DIDAvailability, RuleState -from rucio.db.sqla.enum import EnumSymbol from rucio.db.sqla.session import read_session, transactional_session, stream_session logging.basicConfig(stream=sys.stdout, @@ -158,7 +158,7 @@ def add_dids(dids, account, session=None): try: if isinstance(did['type'], string_types): - did['type'] = DIDType.from_sym(did['type']) + did['type'] = DIDType[did['type']] if did['type'] == DIDType.FILE: raise exception.UnsupportedOperation("Only collection (dataset/container) can be registered." 
% locals()) @@ -836,8 +836,8 @@ def list_new_dids(did_type, thread=None, total_threads=None, chunk_size=1000, se if did_type: if isinstance(did_type, string_types): - query = query.filter_by(did_type=DIDType.from_sym(did_type)) - elif isinstance(did_type, EnumSymbol): + query = query.filter_by(did_type=DIDType[did_type]) + elif isinstance(did_type, Enum): query = query.filter_by(did_type=did_type) query = filter_thread_work(session=session, query=query, total_threads=total_threads, thread_id=thread, hash_variable='name') diff --git a/lib/rucio/core/importer.py b/lib/rucio/core/importer.py index a72f680824..2024361978 100644 --- a/lib/rucio/core/importer.py +++ b/lib/rucio/core/importer.py @@ -1,4 +1,5 @@ -# Copyright 2012-2018 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,11 +15,11 @@ # # Authors: # - Hannes Hansen , 2018-2019 -# - Andrew Lister, , 2019 +# - Andrew Lister , 2019 # - Aristeidis Fkiaras , 2019 # - Eli Chadwick , 2020 -# -# PY3K COMPATIBLE +# - Tomas Javurek , 2020 +# - Mario Lassnig , 2020 from six import string_types from rucio.common.exception import RSEOperationNotSupported @@ -36,7 +37,7 @@ def import_rses(rses, rse_sync_method='edit', attr_sync_method='edit', protocol_ for rse_name in rses: rse = rses[rse_name] if isinstance(rse.get('rse_type'), string_types): - rse['rse_type'] = RSEType.from_string(str(rse['rse_type'])) + rse['rse_type'] = RSEType(rse['rse_type']) if rse_module.rse_exists(rse_name, vo=vo, include_deleted=False, session=session): # RSE exists and is active @@ -161,7 +162,7 @@ def import_distances(distances, vo='def', session=None): @transactional_session def import_identities(identities, account_name, old_identities, old_identity_account, account_email, session=None): for identity in identities: - identity['type'] = IdentityType.from_sym(identity['type']) + identity['type'] = IdentityType[identity['type'].upper()] missing_identities = [identity for identity in identities if (identity['identity'], identity['type']) not in old_identities] missing_identity_account = [identity for identity in identities if (identity['identity'], identity['type'], account_name) not in old_identity_account] diff --git a/lib/rucio/core/lifetime_exception.py b/lib/rucio/core/lifetime_exception.py index 26c736f2ab..5f38bf7e65 100644 --- a/lib/rucio/core/lifetime_exception.py +++ b/lib/rucio/core/lifetime_exception.py @@ -1,4 +1,5 @@ -# Copyright 2017-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2017-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,13 +14,13 @@ # limitations under the License. 
# # Authors: -# - Cedric Serfon, , 2016-2018 -# - Dimitrios Christidis, 2018 -# - Hannes Hansen, , 2018 -# - Andrew Lister, , 2019 +# - Cedric Serfon , 2017-2018 +# - Dimitrios Christidis , 2018 +# - Martin Barisits , 2018-2019 +# - Hannes Hansen , 2018-2019 +# - Andrew Lister , 2019 # - Benedikt Ziemons , 2020 -# -# PY3K COMPATIBLE +# - Mario Lassnig , 2020 from __future__ import division @@ -111,7 +112,7 @@ def add_exception(dids, account, pattern, comments, expires_at, session=None): did_type = None if 'did_type' in did: if isinstance(did['did_type'], string_types): - did_type = DIDType.from_sym(did['did_type']) + did_type = DIDType[did['did_type']] else: did_type = did['did_type'] new_exception = models.LifetimeExceptions(id=exception_id, scope=did['scope'], name=did['name'], did_type=did_type, diff --git a/lib/rucio/core/meta.py b/lib/rucio/core/meta.py index a29860940b..0127cd16ac 100644 --- a/lib/rucio/core/meta.py +++ b/lib/rucio/core/meta.py @@ -1,4 +1,5 @@ -# Copyright 2012-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2012-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,16 +15,15 @@ # # Authors: # - Vincent Garonne , 2012-2015 -# - Mario Lassnig , 2013 +# - Mario Lassnig , 2013-2020 # - Hannes Hansen , 2018 # - Patrick Austin , 2020 # - Benedikt Ziemons , 2020 # - Martin Barisits , 2020 -# -# PY3K COMPATIBLE from __future__ import print_function from re import match +from six import string_types from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -54,7 +54,11 @@ def add_key(key, key_type, value_type=None, value_regexp=None, session=None): raise UnsupportedValueType('The type \'%(value_type)s\' is not supported for values!' % locals()) # Convert key_type - key_type = str(key_type) + if isinstance(key_type, string_types): + key_type = str(key_type) + else: + key_type = str(key_type.value) + if key_type == 'F': key_type = 'FILE' elif key_type == 'D': @@ -63,7 +67,7 @@ def add_key(key, key_type, value_type=None, value_regexp=None, session=None): key_type = 'CONTAINER' try: - key_type = KeyType.from_string(key_type) + key_type = KeyType(key_type) except ValueError: raise UnsupportedKeyType('The type \'%s\' is not supported for keys!' 
% str(key_type)) diff --git a/lib/rucio/core/replica.py b/lib/rucio/core/replica.py index 99f99cf30e..e07f8e347e 100644 --- a/lib/rucio/core/replica.py +++ b/lib/rucio/core/replica.py @@ -114,7 +114,7 @@ def get_bad_replicas_summary(rse_expression=None, from_date=None, to_date=None, for row in summary: if (row[2], row[1], row[4]) not in incidents: incidents[(row[2], row[1], row[4])] = {} - incidents[(row[2], row[1], row[4])][str(row[3])] = row[0] + incidents[(row[2], row[1], row[4])][str(row[3].name)] = row[0] for incident in incidents: res = incidents[incident] @@ -123,6 +123,7 @@ def get_bad_replicas_summary(rse_expression=None, from_date=None, to_date=None, res['created_at'] = incident[1] res['reason'] = incident[2] result.append(res) + return result @@ -1005,7 +1006,7 @@ def _list_replicas(dataset_clause, file_clause, state_clause, show_pfns, if file['scope'] == scope and file['name'] == name: # extract properly the pfn from the tuple file['rses'][rse_id] += list(set([tmp_pfn[0] for tmp_pfn in pfns])) - file['states'][rse_id] = str(state) + file['states'][rse_id] = str(state.name if state else state) if resolve_parents: file['parents'] = ['%s:%s' % (parent['scope'].internal, parent['name']) @@ -1014,7 +1015,7 @@ def _list_replicas(dataset_clause, file_clause, state_clause, show_pfns, for tmp_pfn in pfns: file['pfns'][tmp_pfn[0]] = {'rse_id': tmp_pfn[4]['rse_id'] if tmp_pfn[1] == 'zip' else rse_id, 'rse': tmp_pfn[4]['rse'] if tmp_pfn[1] == 'zip' else rse, - 'type': tmp_pfn[4]['type'] if tmp_pfn[1] == 'zip' else str(rse_type), + 'type': tmp_pfn[4]['type'] if tmp_pfn[1] == 'zip' else str(rse_type.name), 'volatile': tmp_pfn[4]['volatile'] if tmp_pfn[1] == 'zip' else volatile, 'domain': tmp_pfn[1], 'priority': tmp_pfn[2], @@ -1051,7 +1052,7 @@ def _list_replicas(dataset_clause, file_clause, state_clause, show_pfns, file['scope'], file['name'] = scope, name file['bytes'], file['md5'], file['adler32'] = bytes, md5, adler32 file['pfns'], file['rses'] = {}, defaultdict(list) - file['states'] = {rse_id: str(state)} + file['states'] = {rse_id: str(state.name if state else state)} if resolve_parents: file['parents'] = ['%s:%s' % (parent['scope'].internal, parent['name']) @@ -1278,15 +1279,10 @@ def __bulk_add_replicas(rse_id, files, account, session=None): new_replicas.append({'rse_id': rse_id, 'scope': file['scope'], 'name': file['name'], 'bytes': file['bytes'], 'path': file.get('path'), - 'state': ReplicaState.from_string(file.get('state', 'A')), + 'state': ReplicaState(file.get('state', 'A')), 'md5': file.get('md5'), 'adler32': file.get('adler32'), 'lock_cnt': file.get('lock_cnt', 0), 'tombstone': file.get('tombstone')}) -# new_replica = models.RSEFileAssociation(rse_id=rse_id, scope=file['scope'], name=file['name'], bytes=file['bytes'], -# path=file.get('path'), state=ReplicaState.from_string(file.get('state', 'A')), -# md5=file.get('md5'), adler32=file.get('adler32'), lock_cnt=file.get('lock_cnt', 0), -# tombstone=file.get('tombstone')) -# new_replica.save(session=session, flush=False) try: new_replicas and session.bulk_insert_mappings(models.RSEFileAssociation, new_replicas) @@ -1835,7 +1831,7 @@ def update_replicas_states(replicas, nowait=False, add_tombstone=False, session= raise exception.ReplicaNotFound("No row found for scope: %s name: %s rse: %s" % (replica['scope'], replica['name'], get_rse_name(replica['rse_id'], session=session))) if isinstance(replica['state'], string_types): - replica['state'] = ReplicaState.from_string(replica['state']) + replica['state'] = 
ReplicaState(replica['state']) values = {'state': replica['state']} if replica['state'] == ReplicaState.BEING_DELETED: @@ -2909,8 +2905,9 @@ def add_bad_pfns(pfns, account, state, reason=None, expires_at=None, session=Non :returns: True is successful. """ + if isinstance(state, string_types): - rep_state = BadPFNStatus.from_sym(state) + rep_state = BadPFNStatus[state] else: rep_state = state @@ -3016,7 +3013,7 @@ def get_suspicious_files(rse_expression, filter=None, **kwargs): # assembling exclude_states_clause exclude_states_clause = [] for state in exclude_states: - exclude_states_clause.append(BadFilesStatus.from_string(state)) + exclude_states_clause.append(BadFilesStatus(state)) # making aliases for bad_replicas and replicas tables bad_replicas_alias = aliased(models.BadReplicas, name='bad_replicas_alias') diff --git a/lib/rucio/core/rse.py b/lib/rucio/core/rse.py index c33e5537ab..415c5f7120 100644 --- a/lib/rucio/core/rse.py +++ b/lib/rucio/core/rse.py @@ -1,4 +1,5 @@ -# Copyright 2012-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2012-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,24 +14,27 @@ # limitations under the License. # # Authors: -# - Vincent Garonne , 2012-2018 +# - Vincent Garonne , 2012-2018 # - Ralph Vigne , 2012-2015 # - Mario Lassnig , 2012-2020 # - Martin Barisits , 2013-2020 # - Cedric Serfon , 2013-2018 # - Thomas Beermann , 2014-2017 -# - Wen Guan , 2015-2016 +# - Wen Guan , 2015-2016 # - Brian Bockelman , 2018 # - Frank Berghaus , 2018 -# - Dimitrios Christidis , 2018-2019 -# - Hannes Hansen , 2018 -# - Gabriele Fronze' , 2019 +# - Joaquín Bogado , 2018 +# - Hannes Hansen , 2018-2019 +# - Dimitrios Christidis , 2018-2020 +# - James Perry , 2019 # - Andrew Lister , 2019 -# - Brandon White , 2019-2020 +# - Brandon White , 2019 +# - Gabriele Fronze' , 2019 # - Aristeidis Fkiaras , 2019 # - Patrick Austin , 2020 -# -# PY3K COMPATIBLE +# - Eli Chadwick , 2020 +# - Benedikt Ziemons , 2020 +# - Tomas Javurek , 2020 from __future__ import division @@ -96,7 +100,7 @@ def add_rse(rse, vo='def', deterministic=True, volatile=False, city=None, region :param session: The database session in use. 
""" if isinstance(rse_type, string_types): - rse_type = RSEType.from_string(str(rse_type)) + rse_type = RSEType(rse_type) new_rse = models.RSE(rse=rse, vo=vo, deterministic=deterministic, volatile=volatile, city=city, region_code=region_code, country_name=country_name, @@ -383,7 +387,7 @@ def list_rses(filters={}, session=None): for (k, v) in filters.items(): if hasattr(models.RSE, k): if k == 'rse_type': - query = query.filter(getattr(models.RSE, k) == RSEType.from_sym(v)) + query = query.filter(getattr(models.RSE, k) == RSEType[v]) else: query = query.filter(getattr(models.RSE, k) == v) elif k in ['availability_read', 'availability_write', 'availability_delete']: diff --git a/lib/rucio/core/rule.py b/lib/rucio/core/rule.py index e00304de17..e113b5ee6e 100644 --- a/lib/rucio/core/rule.py +++ b/lib/rucio/core/rule.py @@ -15,7 +15,7 @@ # # Authors: # - Vincent Garonne , 2012-2018 -# - Mario Lassnig , 2013-2019 +# - Mario Lassnig , 2013-2020 # - Martin Barisits , 2013-2020 # - Cedric Serfon , 2014-2020 # - David Cameron , 2014 @@ -824,16 +824,16 @@ def list_rules(filters={}, session=None): continue elif key == 'state': if isinstance(value, string_types): - value = RuleState.from_string(value) + value = RuleState(value) else: try: - value = RuleState.from_sym(value) + value = RuleState[value] except ValueError: pass elif key == 'did_type' and isinstance(value, string_types): - value = DIDType.from_string(value) + value = DIDType(value) elif key == 'grouping' and isinstance(value, string_types): - value = RuleGrouping.from_string(value) + value = RuleGrouping(value) query = query.filter(getattr(models.ReplicationRule, key) == value) try: diff --git a/lib/rucio/core/vo.py b/lib/rucio/core/vo.py index 224ba1eb41..993c647e3a 100644 --- a/lib/rucio/core/vo.py +++ b/lib/rucio/core/vo.py @@ -1,4 +1,5 @@ -# Copyright 2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,6 +16,7 @@ # Authors: # - Andrew Lister , 2019 # - Patrick Austin , 2020 +# - Mario Lassnig , 2020 from sqlalchemy.exc import DatabaseError, IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -69,9 +71,9 @@ def add_vo(vo, description, email, session=None): from rucio.core.account import add_account, list_identities from rucio.core.identity import add_account_identity new_root = InternalAccount('root', vo=vo) - add_account(account=new_root, type=AccountType.from_sym('SERVICE'), email=email, session=session) + add_account(account=new_root, type=AccountType['SERVICE'], email=email, session=session) add_account_identity(identity='root@{}'.format(vo), - type=IdentityType.from_sym('userpass'), + type=IdentityType['USERPASS'], account=new_root, email=email, default=False, diff --git a/lib/rucio/daemons/transmogrifier/transmogrifier.py b/lib/rucio/daemons/transmogrifier/transmogrifier.py index 6f5db507cd..7d8c4f15ca 100644 --- a/lib/rucio/daemons/transmogrifier/transmogrifier.py +++ b/lib/rucio/daemons/transmogrifier/transmogrifier.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2013-2020 CERN +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,16 +14,12 @@ # limitations under the License. 
# # Authors: -# - Cedric Serfon , 2013-2020 -# - Vincent Garonne , 2014-2018 -# - David Cameron , 2014 -# - Mario Lassnig , 2015-2018 -# - Wen Guan , 2015 -# - Martin Barisits , 2016-2017 +# - Mario Lassnig , 2018-2020 # - Hannes Hansen , 2018 # - Robert Illingworth , 2019 # - Andrew Lister , 2019 # - Brandon White , 2019 +# - Cedric Serfon , 2020 # - Patrick Austin , 2020 # - Thomas Beermann , 2020 # - Eli Chadwick , 2020 @@ -433,7 +429,7 @@ def transmogrifier(bulk=5, once=False, sleep_time=60): elif did['did_type'] == str(DIDType.CONTAINER): monitor.record_counter(counters='transmogrifier.did.container.processed', delta=1) monitor.record_counter(counters='transmogrifier.did.processed', delta=1) - identifiers.append({'scope': did['scope'], 'name': did['name'], 'did_type': DIDType.from_sym(did['did_type'])}) + identifiers.append({'scope': did['scope'], 'name': did['name'], 'did_type': did['did_type']}) time1 = time.time() diff --git a/lib/rucio/db/sqla/constants.py b/lib/rucio/db/sqla/constants.py index ff55b36f26..954b7a69f0 100644 --- a/lib/rucio/db/sqla/constants.py +++ b/lib/rucio/db/sqla/constants.py @@ -1,209 +1,207 @@ -# Copyright European Organization for Nuclear Research (CERN) +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # Authors: -# - Vincent Garonne, , 2013-2017 -# - Mario Lassnig, , 2014, 2017 -# - Martin Barisits, , 2014-2019 -# - Cedric Serfon, , 2015-2018 -# - Wen Guan, , 2016 +# - Vincent Garonne , 2015-2017 +# - Wen Guan , 2015-2016 +# - Cedric Serfon , 2016 +# - Martin Barisits , 2017-2019 +# - Hannes Hansen , 2018 # - Ruturaj Gujar , 2019 # - Jaroslav Guenther , 2019 -# -# PY3K COMPATIBLE - -""" -Constants. - -Each constant is in the format: - CONSTANT_NAME = VALUE, DESCRIPTION -VALUE is what will be stored in the DB. 
-DESCRIPTION is the meaningful string for client -""" +# - Mario Lassnig , 2020 from datetime import datetime +from enum import Enum -from rucio.db.sqla.enum import DeclEnum - - -class AccountStatus(DeclEnum): - ACTIVE = 'ACTIVE', 'ACTIVE' - SUSPENDED = 'SUSPENDED', 'SUSPENDED' - DELETED = 'DELETED', 'DELETED' - +# Individual constants -class AccountType(DeclEnum): - USER = 'USER', 'USER' - GROUP = 'GROUP', 'GROUP' - SERVICE = 'SERVICE', 'SERVICE' +OBSOLETE = datetime(year=1970, month=1, day=1) # Tombstone value to mark obsolete replicas +# The Enum value is the actual string stored in the database -class BadFilesStatus(DeclEnum): - BAD = 'B', 'BAD' - DELETED = 'D', 'DELETED' - LOST = 'L', 'LOST' - RECOVERED = 'R', 'RECOVERED' - SUSPICIOUS = 'S', 'SUSPICIOUS' - TEMPORARY_UNAVAILABLE = 'T', 'TEMPORARY_UNAVAILABLE' +class AccountStatus(Enum): + ACTIVE = 'ACTIVE' + SUSPENDED = 'SUSPENDED' + DELETED = 'DELETED' -class BadPFNStatus(DeclEnum): - BAD = 'B', 'BAD' - SUSPICIOUS = 'S', 'SUSPICIOUS' - TEMPORARY_UNAVAILABLE = 'T', 'TEMPORARY_UNAVAILABLE' - AVAILABLE = 'A', 'AVAILABLE' +class AccountType(Enum): + USER = 'USER' + GROUP = 'GROUP' + SERVICE = 'SERVICE' -class DIDAvailability(DeclEnum): - LOST = 'L', 'LOST' - DELETED = 'D', 'DELETED' - AVAILABLE = 'A', 'AVAILABLE' +class BadFilesStatus(Enum): + BAD = 'B' + DELETED = 'D' + LOST = 'L' + RECOVERED = 'R' + SUSPICIOUS = 'S' + TEMPORARY_UNAVAILABLE = 'T' -class DIDReEvaluation(DeclEnum): - ATTACH = 'A', 'ATTACH' - DETACH = 'D', 'DETACH' +class BadPFNStatus(Enum): + BAD = 'B' + SUSPICIOUS = 'S' + TEMPORARY_UNAVAILABLE = 'T' + AVAILABLE = 'A' -class DIDType(DeclEnum): - FILE = 'F', 'FILE' - DATASET = 'D', 'DATASET' - CONTAINER = 'C', 'CONTAINER' - ARCHIVE = 'A', 'ARCHIVE' - DELETED_FILE = 'X', 'DELETED_FILE' - DELETED_DATASET = 'Y', 'DELETED_DATASET' - DELETED_CONTAINER = 'Z', 'DELETED_CONTAINER' +class DIDAvailability(Enum): + LOST = 'L' + DELETED = 'D' + AVAILABLE = 'A' -class FTSCompleteState(DeclEnum): - OK = 'O', 'Ok' - ERROR = 'E', 'Error' +class DIDReEvaluation(Enum): + ATTACH = 'A' + DETACH = 'D' -class FTSState(DeclEnum): - SUBMITTED = 'S', 'SUBMITTED' - READY = 'R', 'READY' - ACTIVE = 'A', 'ACTIVE' - FAILED = 'F', 'FAILED' - FINISHED = 'X', 'FINISHED' - FINISHEDDIRTY = 'D', 'FINISHEDDIRTY' - CANCELED = 'C', 'CANCELED' +class DIDType(Enum): + FILE = 'F' + DATASET = 'D' + CONTAINER = 'C' + ARCHIVE = 'A' + DELETED_FILE = 'X' + DELETED_DATASET = 'Y' + DELETED_CONTAINER = 'Z' -class IdentityType(DeclEnum): - X509 = 'X509', 'X509' - GSS = 'GSS', 'GSS' - USERPASS = 'USERPASS', 'USERPASS' - SSH = 'SSH', 'SSH' - SAML = 'SAML', 'SAML' - OIDC = 'OIDC', 'OIDC' +class FTSCompleteState(Enum): + OK = 'O' + ERROR = 'E' -class KeyType(DeclEnum): - ALL = 'ALL', 'ALL' - COLLECTION = 'COLLECTION', 'COLLECTION' - CONTAINER = 'CONTAINER', 'CONTAINER' - DATASET = 'DATASET', 'DATASET' - FILE = 'FILE', 'FILE' - DERIVED = 'DERIVED', 'DERIVED' +class FTSState(Enum): + SUBMITTED = 'S' + READY = 'R' + ACTIVE = 'A' + FAILED = 'F' + FINISHED = 'X' + FINISHEDDIRTY = 'D' + CANCELED = 'C' -class LifetimeExceptionsState(DeclEnum): - APPROVED = 'A', 'APPROVED' - REJECTED = 'R', 'REJECTED' - WAITING = 'W', 'WAITING' +class IdentityType(Enum): + X509 = 'X509' + GSS = 'GSS' + USERPASS = 'USERPASS' + SSH = 'SSH' + SAML = 'SAML' + OIDC = 'OIDC' -class LockState(DeclEnum): - REPLICATING = 'R', 'REPLICATING' - OK = 'O', 'OK' - STUCK = 'S', 'STUCK' +class KeyType(Enum): + ALL = 'ALL' + COLLECTION = 'COLLECTION' + CONTAINER = 'CONTAINER' + DATASET = 'DATASET' + FILE = 'FILE' + 
DERIVED = 'DERIVED' -class ReplicaState(DeclEnum): - AVAILABLE = 'A', 'AVAILABLE' - UNAVAILABLE = 'U', 'UNAVAILABLE' - COPYING = 'C', 'COPYING' - BEING_DELETED = 'B', 'BEING_DELETED' - BAD = 'D', 'BAD' - TEMPORARY_UNAVAILABLE = 'T', 'TEMPORARY_UNAVAILABLE' +class LifetimeExceptionsState(Enum): + APPROVED = 'A' + REJECTED = 'R' + WAITING = 'W' -class RequestErrMsg(DeclEnum): - NO_SOURCES = 'NO_SOURCES', 'NO_SOURCES' - SUBMISSION_FAILED = 'SUBMISSION_FAILED', 'SUBMISSION_FAILED' - TRANSFER_FAILED = 'TRANSFER_FAILED', 'TRANSFER_FAILED' - MISMATCH_SCHEME = 'MISMATCH_SCHEME', 'MISMATCH_SCHEME' - OTHER = 'OTHER', 'OTHER' +class LockState(Enum): + REPLICATING = 'R' + OK = 'O' + STUCK = 'S' -class RequestState(DeclEnum): - QUEUED = 'Q', 'QUEUED' - SUBMITTING = 'G', 'SUBMITTING' - SUBMITTED = 'S', 'SUBMITTED' - FAILED = 'F', 'FAILED' - DONE = 'D', 'DONE' - LOST = 'L', 'LOST' - NO_SOURCES = 'N', 'NO_SOURCES' - ONLY_TAPE_SOURCES = 'O', 'ONLY_TAPE_SOURCES' - SUBMISSION_FAILED = 'A', 'SUBMISSION_FAILED' - MISMATCH_SCHEME = 'M', 'MISMATCH_SCHEME' - SUSPEND = 'U', 'SUSPEND' - WAITING = 'W', 'WAITING' +class ReplicaState(Enum): + AVAILABLE = 'A' + UNAVAILABLE = 'U' + COPYING = 'C' + BEING_DELETED = 'B' + BAD = 'D' + TEMPORARY_UNAVAILABLE = 'T' -class RequestType(DeclEnum): - TRANSFER = 'T', 'TRANSFER' - UPLOAD = 'U', 'UPLOAD' - DOWNLOAD = 'D', 'DOWNLOAD' - STAGEIN = 'I', 'STAGEIN' - STAGEOUT = 'O', 'STAGEOUT' +class RequestErrMsg(Enum): + NO_SOURCES = 'NO_SOURCES' + SUBMISSION_FAILED = 'SUBMISSION_FAILED' + TRANSFER_FAILED = 'TRANSFER_FAILED' + MISMATCH_SCHEME = 'MISMATCH_SCHEME' + OTHER = 'OTHER' -class RSEType(DeclEnum): - DISK = 'DISK', 'DISK' - TAPE = 'TAPE', 'TAPE' +class RequestState(Enum): + QUEUED = 'Q' + SUBMITTING = 'G' + SUBMITTED = 'S' + FAILED = 'F' + DONE = 'D' + LOST = 'L' + NO_SOURCES = 'N' + ONLY_TAPE_SOURCES = 'O' + SUBMISSION_FAILED = 'A' + MISMATCH_SCHEME = 'M' + SUSPEND = 'U' + WAITING = 'W' -class RuleGrouping(DeclEnum): - ALL = 'A', 'ALL' - DATASET = 'D', 'DATASET' - NONE = 'N', 'NONE' +class RequestType(Enum): + TRANSFER = 'T' + UPLOAD = 'U' + DOWNLOAD = 'D' + STAGEIN = 'I' + STAGEOUT = 'O' -class RuleNotification(DeclEnum): - YES = 'Y', 'YES' - NO = 'N', 'NO' - CLOSE = 'C', 'CLOSE' - PROGRESS = 'P', 'PROGRESS' +class RSEType(Enum): + DISK = 'DISK' + TAPE = 'TAPE' -class RuleState(DeclEnum): - REPLICATING = 'R', 'REPLICATING' - OK = 'O', 'OK' - STUCK = 'S', 'STUCK' - SUSPENDED = 'U', 'SUSPENDED' - WAITING_APPROVAL = 'W', 'WAITING_APPROVAL' - INJECT = 'I', 'INJECT' +class RuleGrouping(Enum): + ALL = 'A' + DATASET = 'D' + NONE = 'N' -class ScopeStatus(DeclEnum): - OPEN = 'O', 'OPEN' - CLOSED = 'C', 'CLOSED' - DELETED = 'D', 'DELETED' +class RuleNotification(Enum): + YES = 'Y' + NO = 'N' + CLOSE = 'C' + PROGRESS = 'P' -class SubscriptionState(DeclEnum): - ACTIVE = 'A', 'ACTIVE' - INACTIVE = 'I', 'INACTIVE' - NEW = 'N', 'NEW' - UPDATED = 'U', 'UPDATED' - BROKEN = 'B', 'BROKEN' +class RuleState(Enum): + REPLICATING = 'R' + OK = 'O' + STUCK = 'S' + SUSPENDED = 'U' + WAITING_APPROVAL = 'W' + INJECT = 'I' -# Individual constants +class ScopeStatus(Enum): + OPEN = 'O' + CLOSED = 'C' + DELETED = 'D' + -OBSOLETE = datetime(year=1970, month=1, day=1) # Tombstone value to mark obsolete replicas. 
+class SubscriptionState(Enum): + ACTIVE = 'A' + INACTIVE = 'I' + NEW = 'N' + UPDATED = 'U' + BROKEN = 'B' diff --git a/lib/rucio/db/sqla/enum.py b/lib/rucio/db/sqla/enum.py deleted file mode 100644 index a4c6d3bd35..0000000000 --- a/lib/rucio/db/sqla/enum.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright European Organization for Nuclear Research (CERN) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# You may not use this file except in compliance with the License. -# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -# -# Authors: -# - Vincent Garonne, , 2013 -# - Mario Lassnig, , 2013-2014 -# - Martin Barisits, , 2017 -# - Hannes Hansen, , 2019 -# -# PY3K COMPATIBLE - -''' -Class to handle enum type with sqlachelmy. -ref. http://techspot.zzzeek.org/2011/01/14/the-enum-recipe/ - -''' - -import uuid - -from six import add_metaclass -from sqlalchemy.types import SchemaType, TypeDecorator, Enum - -from rucio.common.exception import InvalidType - - -class EnumSymbol(object): - """Define a fixed symbol tied to a parent class.""" - - def __init__(self, cls_, name, value, description): - self.cls_ = cls_ - self.name = name - self.value = value - self.description = description - - def __reduce__(self): - """Allow unpickling to return the symbol - linked to the DeclEnum class.""" - return getattr, (self.cls_, self.name) - - def __iter__(self): - return iter([self.value, self.description]) - - def __repr__(self): - return "%s" % self.description - - -class EnumMeta(type): - """Generate new DeclEnum classes.""" - - def __init__(cls, classname, bases, dict_): # pylint: disable=E0101 - cls._reg = reg = cls._reg.copy() - cls._syms = syms = cls._syms.copy() - for k, v in dict_.items(): - if isinstance(v, tuple): - sym = reg[v[0]] = syms[v[1]] = EnumSymbol(cls, k, *v) - setattr(cls, k, sym) - return type.__init__(cls, classname, bases, dict_) - - def __iter__(cls): - return iter(cls._reg.values()) - - -@add_metaclass(EnumMeta) -class DeclEnum(object): - """Declarative enumeration.""" - - _reg = {} - _syms = {} - - @classmethod - def from_string(cls, value): - try: - return cls._reg[value] - except KeyError: - raise ValueError("Invalid value for %r: %r" % (cls.__name__, value)) - - @classmethod - def from_sym(cls, value): - try: - return cls._syms[value.upper()] - except KeyError: - raise ValueError("Invalid value for %r: %r" % (cls.__name__, value)) - - @classmethod - def values(cls): - return list(cls._reg.keys()) - - @classmethod - def db_type(cls, name=None, default=None): - return DeclEnumType(enum=cls, name=name, default=default) - - -class DeclEnumType(SchemaType, TypeDecorator): - - def __init__(self, enum, name=None, default=None): - self.enum = enum - if name is None: - self.impl = Enum(*enum.values(), native_enum=False, name='RUCIO_ENUM_' + str(uuid.uuid4())[:6]) - else: - self.impl = Enum(*enum.values(), native_enum=False, name=name) - - def _set_parent_with_dispatch(self, parent): - TypeDecorator._set_parent_with_dispatch(self, parent) - SchemaType._set_parent_with_dispatch(self, parent) - - def copy(self): - return DeclEnumType(self.enum) - - def process_bind_param(self, value, dialect): - try: - if value is None: - return None - return value.value - except AttributeError: - raise InvalidType('Invalid value/type %s for %s' % (value, self.enum)) - - def process_result_value(self, value, dialect): - if value is None: - return None - return self.enum.from_string(value.strip()) diff --git 
a/lib/rucio/db/sqla/migrate_repo/versions/01eaf73ab656_add_new_rule_notification_state_progress.py b/lib/rucio/db/sqla/migrate_repo/versions/01eaf73ab656_add_new_rule_notification_state_progress.py index 994b7c80d8..9980e98b1a 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/01eaf73ab656_add_new_rule_notification_state_progress.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/01eaf73ab656_add_new_rule_notification_state_progress.py @@ -1,3 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Authors: +# - Martin Barisits , 2019 +# - Mario Lassnig , 2019-2020 +# - Robert Illingworth , 2019 # Copyright 2019 CERN for the benefit of the ATLAS collaboration. # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/lib/rucio/db/sqla/migrate_repo/versions/1a29d6a9504c_add_didtype_chck_to_requests.py b/lib/rucio/db/sqla/migrate_repo/versions/1a29d6a9504c_add_didtype_chck_to_requests.py index 2850030698..250187f25b 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/1a29d6a9504c_add_didtype_chck_to_requests.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/1a29d6a9504c_add_didtype_chck_to_requests.py @@ -1,3 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Authors: +# - Vincent Garonne , 2015-2017 +# - Martin Barisits , 2016 +# - Mario Lassnig , 2019-2020 # Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,21 +54,27 @@ def upgrade(): Upgrade the database to this revision ''' - if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']: - schema = context.get_context().version_table_schema if context.get_context().version_table_schema else '' + schema = context.get_context().version_table_schema + '.' 
if context.get_context().version_table_schema else ''
+
+    if context.get_context().dialect.name in ['oracle', 'mysql']:
         add_column('requests', sa.Column('did_type',
-                                         DIDType.db_type(name='REQUESTS_DIDTYPE_CHK'),
-                                         default=DIDType.FILE), schema=schema)
+                                         sa.Enum(DIDType, name='REQUESTS_DIDTYPE_CHK'),
+                                         default=DIDType.FILE), schema=schema[:-1])
         # we don't want checks on the history table, fake the DID type
-        add_column('requests_history', sa.Column('did_type', sa.String(1)), schema=schema)
+        add_column('requests_history', sa.Column('did_type', sa.String(1)), schema=schema[:-1])
+    elif context.get_context().dialect.name == 'postgresql':
+        op.execute("ALTER TABLE %srequests ADD COLUMN did_type REQUESTS_DIDTYPE_CHK" % schema)
+        # we don't want checks on the history table, fake the DID type
+        add_column('requests_history', sa.Column('did_type', sa.String(1)), schema=schema[:-1])
 
 
 def downgrade():
     '''
     Downgrade the database to the previous revision
     '''
+    schema = context.get_context().version_table_schema if context.get_context().version_table_schema else ''
+
     if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
-        schema = context.get_context().version_table_schema if context.get_context().version_table_schema else ''
         drop_column('requests', 'did_type', schema=schema)
         drop_column('requests_history', 'did_type', schema=schema)
diff --git a/lib/rucio/db/sqla/migrate_repo/versions/3082b8cef557_add_naming_convention_table_and_closed_.py b/lib/rucio/db/sqla/migrate_repo/versions/3082b8cef557_add_naming_convention_table_and_closed_.py
index 262bc6fee5..9649da60cf 100644
--- a/lib/rucio/db/sqla/migrate_repo/versions/3082b8cef557_add_naming_convention_table_and_closed_.py
+++ b/lib/rucio/db/sqla/migrate_repo/versions/3082b8cef557_add_naming_convention_table_and_closed_.py
@@ -1,4 +1,5 @@
-# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration.
+# -*- coding: utf-8 -*-
+# Copyright 2015-2020 CERN
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,7 +15,9 @@
 #
 # Authors:
 # - Vincent Garonne , 2015-2017
-# - Mario Lassnig , 2019
+# - Martin Barisits , 2016
+# - Mario Lassnig , 2019-2020
+# - James Perry , 2020
 
 ''' add convention table and closed_at to dids '''
 
@@ -47,7 +50,7 @@ def upgrade():
     create_table('naming_conventions',
                  sa.Column('scope', sa.String(get_schema_value('SCOPE_LENGTH'))),
                  sa.Column('regexp', sa.String(255)),
-                 sa.Column('convention_type', KeyType.db_type()),
+                 sa.Column('convention_type', sa.Enum(KeyType, name='CVT_TYPE_CHK')),
                  sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow),
                  sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow))
     create_primary_key('NAMING_CONVENTIONS_PK', 'naming_conventions', ['scope'])
diff --git a/lib/rucio/db/sqla/migrate_repo/versions/384b96aa0f60_created_rule_history_tables.py b/lib/rucio/db/sqla/migrate_repo/versions/384b96aa0f60_created_rule_history_tables.py
index e023cf6a19..0fcf5a9392 100644
--- a/lib/rucio/db/sqla/migrate_repo/versions/384b96aa0f60_created_rule_history_tables.py
+++ b/lib/rucio/db/sqla/migrate_repo/versions/384b96aa0f60_created_rule_history_tables.py
@@ -1,4 +1,5 @@
-# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration.
+# -*- coding: utf-8 -*-
+# Copyright 2015-2020 CERN
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,9 +14,9 @@ # limitations under the License. # # Authors: -# - Martin Barisits , 2015 -# - Vincent Garonne , 2017 -# - Mario Lassnig , 2019 +# - Vincent Garonne , 2015-2017 +# - Martin Barisits , 2016 +# - Mario Lassnig , 2019-2020 ''' created rule history tables ''' @@ -48,8 +49,8 @@ def upgrade(): sa.Column('account', sa.String(25)), sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), - sa.Column('did_type', DIDType.db_type()), - sa.Column('state', RuleState.db_type()), + sa.Column('did_type', sa.Enum(DIDType, name='RULES_HIST_RECENT_DIDTYPE_CHK')), + sa.Column('state', sa.Enum(RuleState, name='RULES_HIST_RECENT_STATE_CHK')), sa.Column('error', sa.String(255)), sa.Column('rse_expression', sa.String(255)), sa.Column('copies', sa.SmallInteger), @@ -61,8 +62,8 @@ def upgrade(): sa.Column('locks_stuck_cnt', sa.BigInteger), sa.Column('source_replica_expression', sa.String(255)), sa.Column('activity', sa.String(50)), - sa.Column('grouping', RuleGrouping.db_type()), - sa.Column('notification', RuleNotification.db_type()), + sa.Column('grouping', sa.Enum(RuleGrouping, name='RULES_HIST_RECENT_GROUPING_CHK')), + sa.Column('notification', sa.Enum(RuleNotification, name='RULES_HIST_RECENT_NOTIFY_CHK')), sa.Column('stuck_at', sa.DateTime), sa.Column('purge_replicas', sa.Boolean()), sa.Column('ignore_availability', sa.Boolean()), @@ -76,8 +77,8 @@ def upgrade(): sa.Column('account', sa.String(25)), sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), - sa.Column('did_type', DIDType.db_type()), - sa.Column('state', RuleState.db_type()), + sa.Column('did_type', sa.Enum(DIDType, name='RULES_HISTORY_DIDTYPE_CHK')), + sa.Column('state', sa.Enum(RuleState, name='RULES_HISTORY_STATE_CHK')), sa.Column('error', sa.String(255)), sa.Column('rse_expression', sa.String(255)), sa.Column('copies', sa.SmallInteger), @@ -89,8 +90,8 @@ def upgrade(): sa.Column('locks_stuck_cnt', sa.BigInteger), sa.Column('source_replica_expression', sa.String(255)), sa.Column('activity', sa.String(50)), - sa.Column('grouping', RuleGrouping.db_type()), - sa.Column('notification', RuleNotification.db_type()), + sa.Column('grouping', sa.Enum(RuleGrouping, name='RULES_HISTORY_GROUPING_CHK')), + sa.Column('notification', sa.Enum(RuleNotification, name='RULES_HISTORY_NOTIFY_CHK')), sa.Column('stuck_at', sa.DateTime), sa.Column('purge_replicas', sa.Boolean()), sa.Column('ignore_availability', sa.Boolean()), diff --git a/lib/rucio/db/sqla/migrate_repo/versions/3ad36e2268b0_create_collection_replicas_updates_table.py b/lib/rucio/db/sqla/migrate_repo/versions/3ad36e2268b0_create_collection_replicas_updates_table.py index c9f49a3e85..25e7429a40 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/3ad36e2268b0_create_collection_replicas_updates_table.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/3ad36e2268b0_create_collection_replicas_updates_table.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,9 +14,9 @@ # limitations under the License. 
# # Authors: -# - Martin Barisits , 2015 -# - Vincent Garonne , 2017 -# - Mario Lassnig , 2019 +# - Vincent Garonne , 2015-2017 +# - Martin Barisits , 2016 +# - Mario Lassnig , 2019-2020 ''' create collection_replicas_updates table ''' @@ -51,7 +52,7 @@ def upgrade(): sa.Column('id', GUID()), sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), - sa.Column('did_type', DIDType.db_type()), + sa.Column('did_type', sa.Enum(DIDType, name='UPDATED_COL_REP_TYPE_CHK')), sa.Column('rse_id', GUID()), sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow), sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)) diff --git a/lib/rucio/db/sqla/migrate_repo/versions/4207be2fd914_add_notification_column_to_rules.py b/lib/rucio/db/sqla/migrate_repo/versions/4207be2fd914_add_notification_column_to_rules.py index dab88624b0..a9346d5b99 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/4207be2fd914_add_notification_column_to_rules.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/4207be2fd914_add_notification_column_to_rules.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,15 +14,15 @@ # limitations under the License. # # Authors: -# - Martin Barisits , 2014 -# - Vincent Garonne , 2017 -# - Mario Lassnig , 2019 +# - Vincent Garonne , 2015-2017 +# - Martin Barisits , 2016 +# - Mario Lassnig , 2019-2020 ''' add notification column to rules ''' import sqlalchemy as sa -from alembic import context +from alembic import context, op from alembic.op import add_column, drop_constraint, drop_column from rucio.db.sqla.constants import RuleNotification @@ -37,22 +38,24 @@ def upgrade(): Upgrade the database to this revision ''' - if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']: - schema = context.get_context().version_table_schema if context.get_context().version_table_schema else '' - add_column('rules', sa.Column('notification', RuleNotification.db_type(name='RULES_NOTIFICATION_CHK'), - default=RuleNotification.NO), schema=schema) + schema = context.get_context().version_table_schema + '.' if context.get_context().version_table_schema else '' + if context.get_context().dialect.name in ['oracle', 'mysql']: + add_column('rules', sa.Column('notification', sa.Enum(RuleNotification, name='RULES_NOTIFICATION_CHK'), default=RuleNotification.NO), schema=schema[:-1]) + elif context.get_context().dialect.name == 'postgresql': + op.execute("CREATE TYPE RULES_NOTIFICATION_CHK AS ENUM('Y', 'N', 'C', 'P')") + op.execute("ALTER TABLE %srules ADD COLUMN notification RULES_NOTIFICATION_CHK" % schema) def downgrade(): ''' Downgrade the database to the previous revision ''' - schema = context.get_context().version_table_schema if context.get_context().version_table_schema else '' + schema = context.get_context().version_table_schema + '.' 
if context.get_context().version_table_schema else '' if context.get_context().dialect.name in ['oracle', 'postgresql']: drop_constraint('RULES_NOTIFICATION_CHK', 'rules', type_='check') - drop_column('rules', 'notification', schema=schema) + drop_column('rules', 'notification', schema=schema[:-1]) elif context.get_context().dialect.name == 'mysql': - drop_column('rules', 'notification', schema=schema) + drop_column('rules', 'notification', schema=schema[:-1]) diff --git a/lib/rucio/db/sqla/migrate_repo/versions/45378a1e76a8_create_collection_replica_table.py b/lib/rucio/db/sqla/migrate_repo/versions/45378a1e76a8_create_collection_replica_table.py index 2cb930e91e..e11c59234d 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/45378a1e76a8_create_collection_replica_table.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/45378a1e76a8_create_collection_replica_table.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,9 +14,9 @@ # limitations under the License. # # Authors: -# - Martin Barisits , 2015-2019 -# - Vincent Garonne , 2017 -# - Mario Lassnig , 2019 +# - Vincent Garonne , 2015-2017 +# - Martin Barisits , 2016-2019 +# - Mario Lassnig , 2019-2020 ''' create collection replica table ''' @@ -46,11 +47,11 @@ def upgrade(): create_table('collection_replicas', sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), - sa.Column('did_type', DIDType.db_type()), + sa.Column('did_type', sa.Enum(DIDType, name='COLLECTION_REPLICAS_TYPE_CHK')), sa.Column('rse_id', GUID()), sa.Column('bytes', sa.BigInteger), sa.Column('length', sa.BigInteger), - sa.Column('state', ReplicaState.db_type(), default=ReplicaState.UNAVAILABLE), + sa.Column('state', sa.Enum(ReplicaState, name='COLLECTION_REPLICAS_STATE_CHK'), default=ReplicaState.UNAVAILABLE), sa.Column('accessed_at', sa.DateTime), sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow), sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)) @@ -60,7 +61,6 @@ def upgrade(): create_foreign_key('COLLECTION_REPLICAS_RSE_ID_FK', 'collection_replicas', 'rses', ['rse_id'], ['id']) create_check_constraint('COLLECTION_REPLICAS_SIZE_NN', 'collection_replicas', 'bytes IS NOT NULL') create_check_constraint('COLLECTION_REPLICAS_STATE_NN', 'collection_replicas', 'state IS NOT NULL') - create_check_constraint('COLLECTION_REPLICAS_STATE_CHK', 'collection_replicas', "state in ('A', 'U', 'C', 'B', 'D', 'S')") create_index('COLLECTION_REPLICAS_RSE_ID_IDX', 'collection_replicas', ['rse_id']) @@ -75,9 +75,9 @@ def downgrade(): elif context.get_context().dialect.name == 'postgresql': schema = context.get_context().version_table_schema + '.' 
if context.get_context().version_table_schema else '' - op.execute('ALTER TABLE ' + schema + 'collection_replicas ALTER COLUMN state TYPE CHAR') # pylint: disable=no-member - drop_constraint('COLLECTION_REPLICAS_STATE_CHK', 'collection_replicas', type_='check') drop_table('collection_replicas') + op.execute('DROP TYPE "COLLECTION_REPLICAS_TYPE_CHK"') + op.execute('DROP TYPE "COLLECTION_REPLICAS_STATE_CHK"') elif context.get_context().dialect.name == 'mysql': drop_table('collection_replicas') diff --git a/lib/rucio/db/sqla/migrate_repo/versions/7541902bf173_add_didsfollowed_and_followevents_table.py b/lib/rucio/db/sqla/migrate_repo/versions/7541902bf173_add_didsfollowed_and_followevents_table.py index 7dee629e29..3d3ad38b3a 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/7541902bf173_add_didsfollowed_and_followevents_table.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/7541902bf173_add_didsfollowed_and_followevents_table.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +14,9 @@ # limitations under the License. # # Authors: -# - Ruturaj Gujar, , 2019 +# - Ruturaj Gujar , 2019 +# - Martin Barisits , 2019 +# - Mario Lassnig , 2020 ''' add DidsFollowed and FollowEvents table ''' @@ -41,7 +44,7 @@ def upgrade(): sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), sa.Column('account', sa.String(25)), - sa.Column('did_type', DIDType.db_type(name='DIDS_FOLLOWED_TYPE_CHK')), + sa.Column('did_type', sa.Enum(DIDType, name='DIDS_FOLLOWED_TYPE_CHK')), sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow), sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow)) @@ -61,7 +64,7 @@ def upgrade(): sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), sa.Column('account', sa.String(25)), - sa.Column('did_type', DIDType.db_type(name='DIDS_FOLLOWED_EVENTS_TYPE_CHK')), + sa.Column('did_type', sa.Enum(DIDType, name='DIDS_FOLLOWED_EVENTS_TYPE_CHK')), sa.Column('event_type', sa.String(1024)), sa.Column('payload', sa.Text), sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow), diff --git a/lib/rucio/db/sqla/migrate_repo/versions/914b8f02df38_new_table_for_lifetime_model_exceptions.py b/lib/rucio/db/sqla/migrate_repo/versions/914b8f02df38_new_table_for_lifetime_model_exceptions.py index d1fdbb9460..b36f2d3452 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/914b8f02df38_new_table_for_lifetime_model_exceptions.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/914b8f02df38_new_table_for_lifetime_model_exceptions.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2016-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,9 +14,9 @@ # limitations under the License. 
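For reviewers unfamiliar with the pattern above: sa.Enum bound to a Python enum class renders differently per backend, which is why the PostgreSQL branches need explicit CREATE TYPE/DROP TYPE statements while Oracle and MySQL do not. A minimal sketch under that assumption, with a hypothetical table and type name that are not part of this change:

    import sqlalchemy as sa
    from alembic import context, op

    from rucio.db.sqla.constants import DIDType


    def upgrade():
        # On PostgreSQL, sa.Enum emits CREATE TYPE "DEMO_TYPE_CHK" AS ENUM (...)
        # before the table; on MySQL it becomes an inline ENUM column; on Oracle
        # a VARCHAR column plus a CHECK constraint named DEMO_TYPE_CHK.
        op.create_table('demo_table',  # hypothetical table, for illustration only
                        sa.Column('did_type', sa.Enum(DIDType, name='DEMO_TYPE_CHK')))


    def downgrade():
        op.drop_table('demo_table')
        if context.get_context().dialect.name == 'postgresql':
            # The named TYPE is a schema-level object and survives DROP TABLE,
            # so it has to be removed explicitly, as the migrations above do.
            op.execute('DROP TYPE "DEMO_TYPE_CHK"')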
# # Authors: -# - Vincent Garonne , 2015-2016 # - Cedric Serfon , 2016 -# - Mario Lassnig , 2019 +# - Vincent Garonne , 2017 +# - Mario Lassnig , 2019-2020 ''' new table for lifetime model exceptions ''' @@ -46,11 +47,11 @@ def upgrade(): sa.Column('id', GUID()), sa.Column('scope', sa.String(25)), sa.Column('name', sa.String(255)), - sa.Column('did_type', DIDType.db_type()), + sa.Column('did_type', sa.Enum(DIDType, name='LIFETIME_EXCEPT_TYPE_CHK')), sa.Column('account', sa.String(25)), sa.Column('comments', sa.String(4000)), sa.Column('pattern', sa.String(255)), - sa.Column('state', LifetimeExceptionsState.db_type()), + sa.Column('state', sa.Enum(LifetimeExceptionsState, name='LIFETIME_EXCEPT_STATE_CHK')), sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow), sa.Column('updated_at', sa.DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow), sa.Column('expires_at', sa.DateTime)) diff --git a/lib/rucio/db/sqla/migrate_repo/versions/b7d287de34fd_removal_of_replicastate_source.py b/lib/rucio/db/sqla/migrate_repo/versions/b7d287de34fd_removal_of_replicastate_source.py index 346769da06..2e990a5de0 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/b7d287de34fd_removal_of_replicastate_source.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/b7d287de34fd_removal_of_replicastate_source.py @@ -1,3 +1,21 @@ +# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Authors: +# - Martin Barisits , 2019 +# - Mario Lassnig , 2019-2020 # Copyright 2019 CERN for the benefit of the ATLAS collaboration. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -34,7 +52,7 @@ def upgrade(): schema = context.get_context().version_table_schema + '.' if context.get_context().version_table_schema else '' - if context.get_context().dialect.name in ['oracle', 'postgresql']: + if context.get_context().dialect.name == 'oracle': drop_constraint('REPLICAS_STATE_CHK', 'replicas', type_='check') create_check_constraint(constraint_name='REPLICAS_STATE_CHK', table_name='replicas', condition="state in ('A', 'U', 'C', 'B', 'D', 'T')") @@ -42,6 +60,9 @@ def upgrade(): create_check_constraint(constraint_name='COLLECTION_REPLICAS_STATE_CHK', table_name='collection_replicas', condition="state in ('A', 'U', 'C', 'B', 'D', 'T')") + elif context.get_context().dialect.name == 'postgresql': + pass # too complicated in PostgreSQL -- leave the ENUM as it is + elif context.get_context().dialect.name == 'mysql' and context.get_context().dialect.server_version_info[0] == 5: create_check_constraint(constraint_name='REPLICAS_STATE_CHK', table_name='replicas', condition="state in ('A', 'U', 'C', 'B', 'D', 'T')") @@ -64,7 +85,7 @@ def downgrade(): schema = context.get_context().version_table_schema + '.' 
if context.get_context().version_table_schema else '' - if context.get_context().dialect.name in ['oracle', 'postgresql']: + if context.get_context().dialect.name == 'oracle': drop_constraint('REPLICAS_STATE_CHK', 'replicas', type_='check') create_check_constraint(constraint_name='REPLICAS_STATE_CHK', table_name='replicas', condition="state in ('A', 'U', 'C', 'B', 'D', 'S', 'T')") @@ -72,6 +93,9 @@ def downgrade(): create_check_constraint(constraint_name='COLLECTION_REPLICAS_STATE_CHK', table_name='collection_replicas', condition="state in ('A', 'U', 'C', 'B', 'D', 'S', 'T')") + elif context.get_context().dialect.name == 'postgresql': + pass # too complicated in PostgreSQL -- leave the ENUM as it is + elif context.get_context().dialect.name == 'mysql' and context.get_context().dialect.server_version_info[0] == 5: create_check_constraint(constraint_name='REPLICAS_STATE_CHK', table_name='replicas', condition="state in ('A', 'U', 'C', 'B', 'D', 'S', 'T')") diff --git a/lib/rucio/db/sqla/migrate_repo/versions/b96a1c7e1cc4_new_bad_pfns_table_and_bad_replicas_.py b/lib/rucio/db/sqla/migrate_repo/versions/b96a1c7e1cc4_new_bad_pfns_table_and_bad_replicas_.py index a7e96f1967..187a149c9f 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/b96a1c7e1cc4_new_bad_pfns_table_and_bad_replicas_.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/b96a1c7e1cc4_new_bad_pfns_table_and_bad_replicas_.py @@ -1,4 +1,5 @@ -# Copyright 2018-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,7 +15,7 @@ # # Authors: # - Martin Barisits , 2018-2019 -# - Mario Lassnig , 2019 +# - Mario Lassnig , 2019-2020 # - Robert Illingworth , 2019 ''' new bad_pfns table and bad_replicas changes ''' @@ -48,7 +49,7 @@ def upgrade(): # Create new bad_pfns table create_table('bad_pfns', sa.Column('path', sa.String(2048)), - sa.Column('state', BadPFNStatus.db_type(name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS), + sa.Column('state', sa.Enum(BadPFNStatus, name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS), sa.Column('reason', sa.String(255)), sa.Column('account', sa.String(25)), sa.Column('expires_at', sa.DateTime), @@ -76,7 +77,7 @@ def upgrade(): # Create new bad_pfns table create_table('bad_pfns', sa.Column('path', sa.String(2048)), - sa.Column('state', BadPFNStatus.db_type(name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS), + sa.Column('state', sa.Enum(BadPFNStatus, name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS), sa.Column('reason', sa.String(255)), sa.Column('account', sa.String(25)), sa.Column('expires_at', sa.DateTime), @@ -103,7 +104,7 @@ def upgrade(): # Create new bad_pfns table create_table('bad_pfns', sa.Column('path', sa.String(2048)), - sa.Column('state', BadPFNStatus.db_type(name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS), + sa.Column('state', sa.Enum(BadPFNStatus, name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS), sa.Column('reason', sa.String(255)), sa.Column('account', sa.String(25)), sa.Column('expires_at', sa.DateTime), diff --git a/lib/rucio/db/sqla/migrate_repo/versions/d1189a09c6e0_oauth2_0_and_jwt_feature_support_adding_.py b/lib/rucio/db/sqla/migrate_repo/versions/d1189a09c6e0_oauth2_0_and_jwt_feature_support_adding_.py index 2240763572..36a1a1b779 100644 --- 
a/lib/rucio/db/sqla/migrate_repo/versions/d1189a09c6e0_oauth2_0_and_jwt_feature_support_adding_.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/d1189a09c6e0_oauth2_0_and_jwt_feature_support_adding_.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +14,9 @@ # limitations under the License. # # Authors: -# - Jaroslav Guenther , 2019 +# - Jaroslav Guenther , 2019-2020 +# - Martin Barisits , 2020 +# - Mario Lassnig , 2020 ''' OAuth2.0 and JWT feature support; adding table oauth_requests & several columns to tokens table ''' @@ -160,12 +163,10 @@ def downgrade(): elif context.get_context().dialect.name == 'postgresql': # pylint: disable=no-member - drop_constraint('IDENTITIES_TYPE_CHK', 'identities', type_='check') create_check_constraint(constraint_name='IDENTITIES_TYPE_CHK', table_name='identities', condition="identity_type in ('X509', 'GSS', 'USERPASS', 'SSH', 'SAML')") - drop_constraint('ACCOUNT_MAP_ID_TYPE_CHK', 'account_map', type_='check') create_check_constraint(constraint_name='ACCOUNT_MAP_ID_TYPE_CHK', table_name='account_map', condition="identity_type in ('X509', 'GSS', 'USERPASS', 'SSH', 'SAML')") diff --git a/lib/rucio/db/sqla/migrate_repo/versions/f1b14a8c2ac1_postgres_use_check_constraints.py b/lib/rucio/db/sqla/migrate_repo/versions/f1b14a8c2ac1_postgres_use_check_constraints.py index 9dd8a6bcc3..a1278c3675 100644 --- a/lib/rucio/db/sqla/migrate_repo/versions/f1b14a8c2ac1_postgres_use_check_constraints.py +++ b/lib/rucio/db/sqla/migrate_repo/versions/f1b14a8c2ac1_postgres_use_check_constraints.py @@ -1,4 +1,5 @@ -# Copyright 2013-2019 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2019-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,121 +15,19 @@ # # Authors: # - Robert Illingworth , 2019 +# - Mario Lassnig , 2020 ''' postgres_use_check_constraints ''' -from alembic import context -from alembic.op import (create_foreign_key, drop_constraint, execute) - - # Alembic revision identifiers revision = 'f1b14a8c2ac1' down_revision = 'b8caac94d7f0' def upgrade(): - ''' - Upgrade the database to this revision - ''' - - if context.get_context().dialect.name != 'postgresql': - return - - # depending on the creation/migration history of the schema it may or - # may not already be using check constraints. 
- - schema = context.get_context().version_table_schema if context.get_context().version_table_schema else None - - # drop foreign keys where the type changes - drop_constraint('ACCOUNT_MAP_ID_TYPE_FK', 'account_map', schema=schema, type_='foreignkey') - - did_types = ('A', 'C', 'D', 'F', 'Y', 'X', 'Z') - types_to_drop = set() - for table, column, constraint_name, constraint in ( - ('account_map', 'identity_type', "ACCOUNT_MAP_ID_TYPE_CHK", ('X509', 'GSS', 'USERPASS', 'SSH')), - ('accounts', 'account_type', "ACCOUNTS_TYPE_CHK", ('GROUP', 'USER', 'SERVICE')), - ('accounts', 'status', "ACCOUNTS_STATUS_CHK", ('ACTIVE', 'DELETED', 'SUSPENDED')), - ('bad_pfns', 'state', 'BAD_PFNS_STATE_CHK', ('A', 'S', 'B', 'T')), - ('bad_replicas', 'state', 'BAD_REPLICAS_STATE_CHK', ('B', 'D', 'L', 'S', 'R', 'T')), - ('collection_replicas', 'did_type', "COLLECTION_REPLICAS_TYPE_CHK", did_types), - ('collection_replicas', 'state', "COLLECTION_REPLICAS_STATE_CHK", ('A', 'C', 'B', 'D', 'S', 'U', 'T')), - ('contents', 'did_type', "CONTENTS_DID_TYPE_CHK", did_types), - ('contents', 'child_type', "CONTENTS_CHILD_TYPE_CHK", ('A', 'C', 'D', 'F', 'Y', 'X', 'Z')), - ('contents_history', 'did_type', "CONTENTS_HIST_DID_TYPE_CHK", did_types), - ('contents_history', 'child_type', "CONTENTS_HIST_CHILD_TYPE_CHK", ('A', 'C', 'D', 'F', 'Y', 'X', 'Z')), - ('naming_conventions', 'convention_type', "CVT_TYPE_CHK", ('ALL', 'CONTAINER', 'DERIVED', 'COLLECTION', 'DATASET', 'FILE')), - ('dataset_locks', 'state', "DATASET_LOCKS_STATE_CHK", ('S', 'R', 'O')), - ('deleted_dids', 'did_type', "DEL_DIDS_TYPE_CHK", did_types), - ('deleted_dids', 'availability', "DEL_DIDS_AVAIL_CHK", ('A', 'D', 'L')), - ('did_keys', 'key_type', "DID_KEYS_TYPE_CHK", ('ALL', 'CONTAINER', 'DERIVED', 'COLLECTION', 'DATASET', 'FILE')), - ('dids', 'did_type', "DIDS_TYPE_CHK", did_types), - ('dids', 'availability', "DIDS_AVAILABILITY_CHK", ('A', 'D', 'L')), - ('identities', 'identity_type', "IDENTITIES_TYPE_CHK", ('X509', 'GSS', 'USERPASS', 'SSH')), - ('lifetime_except', 'did_type', "LIFETIME_EXCEPT_TYPE_CHK", did_types), - ('lifetime_except', 'state', "LIFETIME_EXCEPT_STATE_CHK", ('A', 'R', 'W')), - ('locks', 'state', "LOCKS_STATE_CHK", ('S', 'R', 'O')), - ('replicas', 'state', 'REPLICAS_STATE_CHK', ('A', 'U', 'C', 'B', 'D', 'S', 'T')), - ('requests', 'request_type', "REQUESTS_TYPE_CHK", ('I', 'U', 'T', 'O', 'D')), - ('requests', 'did_type', "REQUESTS_DIDTYPE_CHK", did_types), - ('requests', 'state', "REQUESTS_STATE_CHK", ('A', 'D', 'G', 'F', 'M', 'L', 'O', 'N', 'Q', 'S', 'U', 'W')), - ('rses', 'rse_type', "RSES_TYPE_CHK", ('DISK', 'TAPE')), - ('rules', 'did_type', "RULES_DID_TYPE_CHK", did_types), - ('rules', 'state', "RULES_STATE_CHK", ('I', 'O', 'S', 'R', 'U', 'W')), - ('rules', 'grouping', "RULES_GROUPING_CHK", ('A', 'D', 'N')), - ('rules', 'notification', "RULES_NOTIFICATION_CHK", ('Y', 'P', 'C', 'N')), - ('rules_history', 'did_type', "RULES_HISTORY_DIDTYPE_CHK", did_types), - ('rules_history', 'state', "RULES_HISTORY_STATE_CHK", ('I', 'O', 'S', 'R', 'U', 'W')), - ('rules_history', 'grouping', "RULES_HISTORY_GROUPING_CHK", ('A', 'D', 'N')), - ('rules_history', 'notification', "RULES_HISTORY_NOTIFY_CHK", ('Y', 'P', 'C', 'N')), - ('rules_hist_recent', 'did_type', "RULES_HIST_RECENT_DIDTYPE_CHK", did_types), - ('rules_hist_recent', 'state', "RULES_HIST_RECENT_STATE_CHK", ('I', 'O', 'S', 'R', 'U', 'W')), - ('rules_hist_recent', 'grouping', "RULES_HIST_RECENT_GROUPING_CHK", ('A', 'D', 'N')), - ('rules_hist_recent', 'notification', "RULES_HIST_RECENT_NOTIFY_CHK", 
('Y', 'P', 'C', 'N')), - ('scopes', 'status', "SCOPE_STATUS_CHK", ('C', 'D', 'O')), - ('subscriptions', 'state', "SUBSCRIPTIONS_STATE_CHK", ('I', 'A', 'B', 'U', 'N')), - ('updated_col_rep', 'did_type', "UPDATED_COL_REP_TYPE_CHK", did_types), - ('updated_dids', 'rule_evaluation_action', "UPDATED_DIDS_RULE_EVAL_ACT_CHK", ('A', 'D')), - ): - - args = {'schema': '%s.' % schema if schema else '', - 'table': table, - 'column': column, - 'constraint_name': constraint_name, - 'constraint': ', '.join("'%s'" % con for con in constraint), - 'size': max(len(con) for con in constraint) - } - execute("""ALTER TABLE %(schema)s%(table)s - DROP CONSTRAINT IF EXISTS "%(constraint_name)s", - ALTER COLUMN %(column)s TYPE varchar(%(size)d) USING %(column)s::text, - ADD CONSTRAINT "%(constraint_name)s" CHECK (%(column)s in (%(constraint)s))""" % args) - types_to_drop.add(constraint_name) - - # four history tables - for table, column, in ( - ('requests_history', 'did_type'), - ('requests_history', 'request_type'), - ('requests_history', 'state'), - ('subscriptions_history', 'state'), - ): - args = {'schema': '%s.' % schema if schema else '', - 'table': table, - 'column': column, - 'size': 1, - } - execute("""ALTER TABLE %(schema)s%(table)s - ALTER COLUMN %(column)s TYPE varchar(%(size)d) USING %(column)s::text""" % args) - - # put back the foreign keys - create_foreign_key('ACCOUNT_MAP_ID_TYPE_FK', 'account_map', 'identities', ['identity', 'identity_type'], ['identity', 'identity_type'], source_schema=schema, referent_schema=schema) - - # now drop the types - nothing should use them now - for constraint_name in types_to_drop: - execute('DROP TYPE IF EXISTS "%s"' % constraint_name) - + # not needed anymore after SQLAlchemy 1.3.8 + pass def downgrade(): - ''' - Downgrade the database to the previous revision - ''' - - # downgrading is too complex. The upgrade should be idempotent, so down then up will still work. + # not needed anymore after SQLAlchemy 1.3.8 + pass diff --git a/lib/rucio/db/sqla/models.py b/lib/rucio/db/sqla/models.py index 490e174352..e25b7226e1 100644 --- a/lib/rucio/db/sqla/models.py +++ b/lib/rucio/db/sqla/models.py @@ -1,4 +1,5 @@ -# Copyright 2015-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,14 +14,12 @@ # limitations under the License. 
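The models.py hunk below swaps every DeclEnum.db_type() column for a plain sqlalchemy.Enum. Note also that account_map now reuses the IDENTITIES_TYPE_CHK type, so the _psql_rename_type workaround, which existed only because PostgreSQL expects foreign key columns to share a type, can go. One behavioural detail worth keeping in mind, shown as a sketch with made-up names: sa.Enum persists the member names by default, and values_callable is the stock SQLAlchemy option if the short member values were ever wanted in the database instead.

    import enum

    from sqlalchemy import Column, Enum, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()


    class DemoState(enum.Enum):  # hypothetical stand-in for a rucio constants enum
        AVAILABLE = 'A'
        UNAVAILABLE = 'U'


    class DemoReplica(Base):  # hypothetical model, for illustration only
        __tablename__ = 'demo_replicas'
        name = Column(String(255), primary_key=True)
        # default behaviour: the database stores 'AVAILABLE' / 'UNAVAILABLE'
        state = Column(Enum(DemoState, name='DEMO_STATE_CHK'),
                       default=DemoState.AVAILABLE)
        # alternative, storing the one-character values 'A' / 'U' instead:
        # state = Column(Enum(DemoState, name='DEMO_STATE_CHK',
        #                     values_callable=lambda x: [e.value for e in x]))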
# # Authors: -# - Vincent Garonne, , 2012-2017 -# - Mario Lassnig, , 2012-2020 -# - Angelos Molfetas, , 2012 -# - Ralph Vigne, , 2013 -# - Cedric Serfon, , 2013-2020 -# - Martin Barisits, , 2013-2020 -# - Wen Guan, , 2015 -# - Joaquin Bogado , 2015-2019 +# - Vincent Garonne , 2015-2017 +# - Joaquín Bogado , 2015-2019 +# - Wen Guan , 2015 +# - Martin Barisits , 2015-2020 +# - Cedric Serfon , 2016-2020 +# - Mario Lassnig , 2017-2020 # - asket , 2018 # - Hannes Hansen , 2019 # - Andrew Lister , 2019 @@ -29,15 +28,13 @@ # - Eli Chadwick , 2020 # - James Perry , 2020 # - Benedikt Ziemons , 2020 -# -# PY3K COMPATIBLE import datetime import sys import uuid from six import iteritems -from sqlalchemy import BigInteger, Boolean, Column, DateTime, Float, Integer, SmallInteger, String as _String, Text, event, UniqueConstraint +from sqlalchemy import BigInteger, Boolean, Column, DateTime, Enum, Float, Integer, SmallInteger, String as _String, Text, event, UniqueConstraint from sqlalchemy.engine import Engine from sqlalchemy.ext.compiler import compiles from sqlalchemy.ext.declarative import declared_attr @@ -85,14 +82,9 @@ def compile_binary_oracle(type_, compiler, **kw): return "NUMBER(1)" -# PostgreSQL expects foreign keys to have the same type. -# Unfortunately, SQLAlchemy propagates the name into the type for the PostgreSQL driver, -# For now, we need to force rename that one case where this happens. @event.listens_for(Table, "before_create") def _psql_rename_type(target, connection, **kw): - if connection.dialect.name == 'postgresql' and target.name == 'account_map': - target.columns.identity_type.type.impl.name = 'IDENTITIES_TYPE_CHK' - elif connection.dialect.name == 'mysql' and target.name == 'quarantined_replicas_history': + if connection.dialect.name == 'mysql' and target.name == 'quarantined_replicas_history': target.columns.path.type = String(255) elif connection.dialect.name == 'mysql' and target.name == 'quarantined_replicas': target.columns.path.type = String(255) @@ -295,8 +287,8 @@ class Account(BASE, ModelBase): """Represents an account""" __tablename__ = 'accounts' account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) - account_type = Column(AccountType.db_type(name='ACCOUNTS_TYPE_CHK')) - status = Column(AccountStatus.db_type(default=AccountStatus.ACTIVE, name='ACCOUNTS_STATUS_CHK')) + account_type = Column(Enum(AccountType, name='ACCOUNTS_TYPE_CHK')) + status = Column(Enum(AccountStatus, name='ACCOUNTS_STATUS_CHK'), default=AccountStatus.ACTIVE) email = Column(String(255)) suspended_at = Column(DateTime) deleted_at = Column(DateTime) @@ -320,7 +312,7 @@ class Identity(BASE, SoftModelBase): """Represents an identity""" __tablename__ = 'identities' identity = Column(String(2048)) - identity_type = Column(IdentityType.db_type(name='IDENTITIES_TYPE_CHK')) + identity_type = Column(Enum(IdentityType, name='IDENTITIES_TYPE_CHK')) username = Column(String(255)) password = Column(String(255)) salt = Column(LargeBinary(255)) @@ -334,7 +326,7 @@ class IdentityAccountAssociation(BASE, ModelBase): """Represents a map account-identity""" __tablename__ = 'account_map' identity = Column(String(2048)) - identity_type = Column(IdentityType.db_type(name='ACCOUNT_MAP_ID_TYPE_CHK')) + identity_type = Column(Enum(IdentityType, name='IDENTITIES_TYPE_CHK')) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) is_default = Column(Boolean(name='ACCOUNT_MAP_DEFAULT_CHK'), default=False) _table_args = (PrimaryKeyConstraint('identity', 'identity_type', 'account', 
name='ACCOUNT_MAP_PK'), @@ -350,7 +342,7 @@ class Scope(BASE, ModelBase): scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) is_default = Column(Boolean(name='SCOPES_DEFAULT_CHK'), default=False) - status = Column(ScopeStatus.db_type(name='SCOPE_STATUS_CHK', default=ScopeStatus.OPEN)) + status = Column(Enum(ScopeStatus, name='SCOPE_STATUS_CHK'), default=ScopeStatus.OPEN) closed_at = Column(DateTime) deleted_at = Column(DateTime) _table_args = (PrimaryKeyConstraint('scope', name='SCOPES_SCOPE_PK'), @@ -366,14 +358,14 @@ class DataIdentifier(BASE, ModelBase): scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) - did_type = Column(DIDType.db_type(name='DIDS_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='DIDS_TYPE_CHK')) is_open = Column(Boolean(name='DIDS_IS_OPEN_CHK')) monotonic = Column(Boolean(name='DIDS_MONOTONIC_CHK'), server_default='0') hidden = Column(Boolean(name='DIDS_HIDDEN_CHK'), server_default='0') obsolete = Column(Boolean(name='DIDS_OBSOLETE_CHK'), server_default='0') complete = Column(Boolean(name='DIDS_COMPLETE_CHK'), server_default=None) is_new = Column(Boolean(name='DIDS_IS_NEW_CHK'), server_default='1') - availability = Column(DIDAvailability.db_type(name='DIDS_AVAILABILITY_CHK'), + availability = Column(Enum(DIDAvailability, name='DIDS_AVAILABILITY_CHK'), default=DIDAvailability.AVAILABLE) suppressed = Column(Boolean(name='FILES_SUPP_CHK'), server_default='0') bytes = Column(BigInteger) @@ -433,14 +425,14 @@ class DeletedDataIdentifier(BASE, ModelBase): scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) - did_type = Column(DIDType.db_type(name='DEL_DIDS_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='DEL_DIDS_TYPE_CHK')) is_open = Column(Boolean(name='DEL_DIDS_IS_OPEN_CHK')) monotonic = Column(Boolean(name='DEL_DIDS_MONO_CHK'), server_default='0') hidden = Column(Boolean(name='DEL_DIDS_HIDDEN_CHK'), server_default='0') obsolete = Column(Boolean(name='DEL_DIDS_OBSOLETE_CHK'), server_default='0') complete = Column(Boolean(name='DEL_DIDS_COMPLETE_CHK')) is_new = Column(Boolean(name='DEL_DIDS_IS_NEW_CHK'), server_default='1') - availability = Column(DIDAvailability.db_type(name='DEL_DIDS_AVAIL_CHK'), + availability = Column(Enum(DIDAvailability, name='DEL_DIDS_AVAIL_CHK'), default=DIDAvailability.AVAILABLE) suppressed = Column(Boolean(name='DEL_FILES_SUPP_CHK'), server_default='0') bytes = Column(BigInteger) @@ -480,7 +472,7 @@ class UpdatedDID(BASE, ModelBase): id = Column(GUID(), default=utils.generate_uuid) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - rule_evaluation_action = Column(DIDReEvaluation.db_type(name='UPDATED_DIDS_RULE_EVAL_ACT_CHK')) + rule_evaluation_action = Column(Enum(DIDReEvaluation, name='UPDATED_DIDS_RULE_EVAL_ACT_CHK')) _table_args = (PrimaryKeyConstraint('id', name='UPDATED_DIDS_PK'), CheckConstraint('SCOPE IS NOT NULL', name='UPDATED_DIDS_SCOPE_NN'), CheckConstraint('NAME IS NOT NULL', name='UPDATED_DIDS_NAME_NN'), @@ -494,7 +486,7 @@ class BadReplicas(BASE, ModelBase): name = Column(String(get_schema_value('NAME_LENGTH'))) rse_id = Column(GUID()) reason = Column(String(255)) 
- state = Column(BadFilesStatus.db_type(name='BAD_REPLICAS_STATE_CHK'), default=BadFilesStatus.SUSPICIOUS) + state = Column(Enum(BadFilesStatus, name='BAD_REPLICAS_STATE_CHK'), default=BadFilesStatus.SUSPICIOUS) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) bytes = Column(BigInteger) expires_at = Column(DateTime) @@ -512,7 +504,7 @@ class BadPFNs(BASE, ModelBase): """Represents bad, suspicious or temporary unavailable PFNs which have to be processed and added to BadReplicas Table""" __tablename__ = 'bad_pfns' path = Column(String(2048)) # PREFIX + PFN - state = Column(BadPFNStatus.db_type(name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS) + state = Column(Enum(BadPFNStatus, name='BAD_PFNS_STATE_CHK'), default=BadPFNStatus.SUSPICIOUS) reason = Column(String(255)) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) expires_at = Column(DateTime) @@ -540,7 +532,7 @@ class DIDKey(BASE, ModelBase): __tablename__ = 'did_keys' key = Column(String(255)) is_enum = Column(Boolean(name='DID_KEYS_IS_ENUM_CHK'), server_default='0') - key_type = Column(KeyType.db_type(name='DID_KEYS_TYPE_CHK')) + key_type = Column(Enum(KeyType, name='DID_KEYS_TYPE_CHK')) value_type = Column(String(255)) value_regexp = Column(String(255)) _table_args = (PrimaryKeyConstraint('key', name='DID_KEYS_PK'), @@ -564,8 +556,8 @@ class DataIdentifierAssociation(BASE, ModelBase): name = Column(String(get_schema_value('NAME_LENGTH'))) # dataset name child_scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) # Provenance scope child_name = Column(String(get_schema_value('NAME_LENGTH'))) # Provenance name - did_type = Column(DIDType.db_type(name='CONTENTS_DID_TYPE_CHK')) - child_type = Column(DIDType.db_type(name='CONTENTS_CHILD_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='CONTENTS_DID_TYPE_CHK')) + child_type = Column(Enum(DIDType, name='CONTENTS_CHILD_TYPE_CHK')) bytes = Column(BigInteger) adler32 = Column(String(8)) md5 = Column(String(32)) @@ -631,8 +623,8 @@ class DataIdentifierAssociationHistory(BASE, ModelBase): name = Column(String(get_schema_value('NAME_LENGTH'))) # dataset name child_scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) # Provenance scope child_name = Column(String(get_schema_value('NAME_LENGTH'))) # Provenance name - did_type = Column(DIDType.db_type(name='CONTENTS_HIST_DID_TYPE_CHK')) - child_type = Column(DIDType.db_type(name='CONTENTS_HIST_CHILD_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='CONTENTS_HIST_DID_TYPE_CHK')) + child_type = Column(Enum(DIDType, name='CONTENTS_HIST_CHILD_TYPE_CHK')) bytes = Column(BigInteger) adler32 = Column(String(8)) md5 = Column(String(32)) @@ -656,7 +648,7 @@ class RSE(BASE, SoftModelBase): id = Column(GUID(), default=utils.generate_uuid) rse = Column(String(255)) vo = Column(String(3), nullable=False, server_default='def') - rse_type = Column(RSEType.db_type(name='RSES_TYPE_CHK'), default=RSEType.DISK) + rse_type = Column(Enum(RSEType, name='RSES_TYPE_CHK'), default=RSEType.DISK) deterministic = Column(Boolean(name='RSE_DETERMINISTIC_CHK'), default=True) volatile = Column(Boolean(name='RSE_VOLATILE_CHK'), default=False) staging_area = Column(Boolean(name='RSE_STAGING_AREA_CHK'), default=False) @@ -820,7 +812,7 @@ class RSEFileAssociation(BASE, ModelBase): md5 = Column(String(32)) adler32 = Column(String(8)) path = Column(String(1024)) - state = Column(ReplicaState.db_type(name='REPLICAS_STATE_CHK'), default=ReplicaState.UNAVAILABLE) + state = 
Column(Enum(ReplicaState, name='REPLICAS_STATE_CHK'), default=ReplicaState.UNAVAILABLE) lock_cnt = Column(Integer, server_default='0') accessed_at = Column(DateTime) tombstone = Column(DateTime) @@ -841,13 +833,13 @@ class CollectionReplica(BASE, ModelBase): __tablename__ = 'collection_replicas' scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='COLLECTION_REPLICAS_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='COLLECTION_REPLICAS_TYPE_CHK')) rse_id = Column(GUID()) bytes = Column(BigInteger) length = Column(BigInteger) available_bytes = Column(BigInteger) available_replicas_cnt = Column(BigInteger) - state = Column(ReplicaState.db_type(name='COLLECTION_REPLICAS_STATE_CHK'), default=ReplicaState.UNAVAILABLE) + state = Column(Enum(ReplicaState, name='COLLECTION_REPLICAS_STATE_CHK'), default=ReplicaState.UNAVAILABLE) accessed_at = Column(DateTime) _table_args = (PrimaryKeyConstraint('scope', 'name', 'rse_id', name='COLLECTION_REPLICAS_PK'), ForeignKeyConstraint(['scope', 'name'], ['dids.scope', 'dids.name'], name='COLLECTION_REPLICAS_LFN_FK'), @@ -863,7 +855,7 @@ class UpdatedCollectionReplica(BASE, ModelBase): id = Column(GUID(), default=utils.generate_uuid) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='UPDATED_COL_REP_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='UPDATED_COL_REP_TYPE_CHK')) rse_id = Column(GUID()) _table_args = (PrimaryKeyConstraint('id', name='UPDATED_COL_REP_PK'), CheckConstraint('SCOPE IS NOT NULL', name='UPDATED_COL_REP_SCOPE_NN'), @@ -881,7 +873,6 @@ class RSEFileAssociationHistory(BASE, ModelBase): _table_args = (PrimaryKeyConstraint('rse_id', 'scope', 'name', name='REPLICAS_HIST_PK'), ForeignKeyConstraint(['rse_id'], ['rses.id'], name='REPLICAS_HIST_RSE_ID_FK'), CheckConstraint('bytes IS NOT NULL', name='REPLICAS_HIST_SIZE_NN')) -# ForeignKeyConstraint(['scope', 'name'], ['dids.scope', 'dids.name'], name='REPLICAS_HIST_LFN_FK'), class ReplicationRule(BASE, ModelBase): @@ -892,8 +883,8 @@ class ReplicationRule(BASE, ModelBase): account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='RULES_DID_TYPE_CHK')) - state = Column(RuleState.db_type(name='RULES_STATE_CHK'), default=RuleState.REPLICATING) + did_type = Column(Enum(DIDType, name='RULES_DID_TYPE_CHK')) + state = Column(Enum(RuleState, name='RULES_STATE_CHK'), default=RuleState.REPLICATING) error = Column(String(255)) rse_expression = Column(String(3000)) copies = Column(SmallInteger, server_default='1') @@ -905,8 +896,8 @@ class ReplicationRule(BASE, ModelBase): locks_stuck_cnt = Column(BigInteger, server_default='0') source_replica_expression = Column(String(255)) activity = Column(String(50), default='default') - grouping = Column(RuleGrouping.db_type(name='RULES_GROUPING_CHK'), default=RuleGrouping.ALL) - notification = Column(RuleNotification.db_type(name='RULES_NOTIFICATION_CHK'), default=RuleNotification.NO) + grouping = Column(Enum(RuleGrouping, name='RULES_GROUPING_CHK'), default=RuleGrouping.ALL) + notification = Column(Enum(RuleNotification, name='RULES_NOTIFICATION_CHK'), default=RuleNotification.NO) stuck_at = Column(DateTime) purge_replicas = 
Column(Boolean(name='RULES_PURGE_REPLICAS_CHK'), default=False) ignore_availability = Column(Boolean(name='RULES_IGNORE_AVAILABILITY_CHK'), default=False) @@ -949,8 +940,8 @@ class ReplicationRuleHistoryRecent(BASE, ModelBase): account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='RULES_HIST_RECENT_DIDTYPE_CHK')) - state = Column(RuleState.db_type(name='RULES_HIST_RECENT_STATE_CHK')) + did_type = Column(Enum(DIDType, name='RULES_HIST_RECENT_DIDTYPE_CHK')) + state = Column(Enum(RuleState, name='RULES_HIST_RECENT_STATE_CHK')) error = Column(String(255)) rse_expression = Column(String(3000)) copies = Column(SmallInteger) @@ -962,8 +953,8 @@ class ReplicationRuleHistoryRecent(BASE, ModelBase): locks_stuck_cnt = Column(BigInteger) source_replica_expression = Column(String(255)) activity = Column(String(50)) - grouping = Column(RuleGrouping.db_type(name='RULES_HIST_RECENT_GROUPING_CHK')) - notification = Column(RuleNotification.db_type(name='RULES_HIST_RECENT_NOTIFY_CHK')) + grouping = Column(Enum(RuleGrouping, name='RULES_HIST_RECENT_GROUPING_CHK')) + notification = Column(Enum(RuleNotification, name='RULES_HIST_RECENT_NOTIFY_CHK')) stuck_at = Column(DateTime) purge_replicas = Column(Boolean()) ignore_availability = Column(Boolean()) @@ -989,8 +980,8 @@ class ReplicationRuleHistory(BASE, ModelBase): account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='RULES_HISTORY_DIDTYPE_CHK')) - state = Column(RuleState.db_type(name='RULES_HISTORY_STATE_CHK')) + did_type = Column(Enum(DIDType, name='RULES_HISTORY_DIDTYPE_CHK')) + state = Column(Enum(RuleState, name='RULES_HISTORY_STATE_CHK')) error = Column(String(255)) rse_expression = Column(String(3000)) copies = Column(SmallInteger) @@ -1002,8 +993,8 @@ class ReplicationRuleHistory(BASE, ModelBase): locks_stuck_cnt = Column(BigInteger) source_replica_expression = Column(String(255)) activity = Column(String(50)) - grouping = Column(RuleGrouping.db_type(name='RULES_HISTORY_GROUPING_CHK')) - notification = Column(RuleNotification.db_type(name='RULES_HISTORY_NOTIFY_CHK')) + grouping = Column(Enum(RuleGrouping, name='RULES_HISTORY_GROUPING_CHK')) + notification = Column(Enum(RuleNotification, name='RULES_HISTORY_NOTIFY_CHK')) stuck_at = Column(DateTime) priority = Column(Integer) purge_replicas = Column(Boolean()) @@ -1029,7 +1020,7 @@ class ReplicaLock(BASE, ModelBase): rse_id = Column(GUID()) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) bytes = Column(BigInteger) - state = Column(LockState.db_type(name='LOCKS_STATE_CHK'), default=LockState.REPLICATING) + state = Column(Enum(LockState, name='LOCKS_STATE_CHK'), default=LockState.REPLICATING) repair_cnt = Column(BigInteger) _table_args = (PrimaryKeyConstraint('scope', 'name', 'rule_id', 'rse_id', name='LOCKS_PK'), # ForeignKeyConstraint(['rse_id', 'scope', 'name'], ['replicas.rse_id', 'replicas.scope', 'replicas.name'], name='LOCKS_REPLICAS_FK'), @@ -1048,7 +1039,7 @@ class DatasetLock(BASE, ModelBase): rule_id = Column(GUID()) rse_id = Column(GUID()) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) - state = Column(LockState.db_type(name='DATASET_LOCKS_STATE_CHK'), 
default=LockState.REPLICATING) + state = Column(Enum(LockState, name='DATASET_LOCKS_STATE_CHK'), default=LockState.REPLICATING) length = Column(BigInteger) bytes = Column(BigInteger) accessed_at = Column(DateTime) @@ -1080,14 +1071,14 @@ class Request(BASE, ModelBase, Versioned): """Represents a request for a single file with a third party service""" __tablename__ = 'requests' id = Column(GUID(), default=utils.generate_uuid) - request_type = Column(RequestType.db_type(name='REQUESTS_TYPE_CHK'), default=RequestType.TRANSFER) + request_type = Column(Enum(RequestType, name='REQUESTS_TYPE_CHK'), default=RequestType.TRANSFER) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='REQUESTS_DIDTYPE_CHK'), default=DIDType.FILE) + did_type = Column(Enum(DIDType, name='REQUESTS_DIDTYPE_CHK'), default=DIDType.FILE) dest_rse_id = Column(GUID()) source_rse_id = Column(GUID()) attributes = Column(String(4000)) - state = Column(RequestState.db_type(name='REQUESTS_STATE_CHK'), default=RequestState.QUEUED) + state = Column(Enum(RequestState, name='REQUESTS_STATE_CHK'), default=RequestState.QUEUED) external_id = Column(String(64)) external_host = Column(String(256)) retry_count = Column(Integer(), server_default='0') @@ -1180,7 +1171,7 @@ class Subscription(BASE, ModelBase, Versioned): filter = Column(String(2048)) replication_rules = Column(String(1024)) policyid = Column(SmallInteger, server_default='0') - state = Column(SubscriptionState.db_type(name='SUBSCRIPTIONS_STATE_CHK', default=SubscriptionState.ACTIVE)) + state = Column(Enum(SubscriptionState, name='SUBSCRIPTIONS_STATE_CHK'), default=SubscriptionState.ACTIVE) last_processed = Column(DateTime, default=datetime.datetime.utcnow()) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) lifetime = Column(DateTime) @@ -1293,7 +1284,7 @@ class NamingConvention(BASE, ModelBase): __tablename__ = 'naming_conventions' scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) regexp = Column(String(255)) - convention_type = Column(KeyType.db_type(name='CVT_TYPE_CHK')) + convention_type = Column(Enum(KeyType, name='CVT_TYPE_CHK')) _table_args = (PrimaryKeyConstraint('scope', name='NAMING_CONVENTIONS_PK'), ForeignKeyConstraint(['scope'], ['scopes.scope'], name='NAMING_CONVENTIONS_SCOPE_FK')) @@ -1326,11 +1317,11 @@ class LifetimeExceptions(BASE, ModelBase): id = Column(GUID(), default=utils.generate_uuid) scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) - did_type = Column(DIDType.db_type(name='LIFETIME_EXCEPT_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='LIFETIME_EXCEPT_TYPE_CHK')) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) pattern = Column(String(255)) comments = Column(String(4000)) - state = Column(LifetimeExceptionsState.db_type(name='LIFETIME_EXCEPT_STATE_CHK')) + state = Column(Enum(LifetimeExceptionsState, name='LIFETIME_EXCEPT_STATE_CHK')) expires_at = Column(DateTime) _table_args = (PrimaryKeyConstraint('id', 'scope', 'name', 'did_type', 'account', name='LIFETIME_EXCEPT_PK'), CheckConstraint('SCOPE IS NOT NULL', name='LIFETIME_EXCEPT_SCOPE_NN'), @@ -1354,7 +1345,7 @@ class DidsFollowed(BASE, ModelBase): scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) account = 
Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) - did_type = Column(DIDType.db_type(name='DIDS_FOLLOWED_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='DIDS_FOLLOWED_TYPE_CHK')) _table_args = (PrimaryKeyConstraint('scope', 'name', 'account', name='DIDS_FOLLOWED_PK'), CheckConstraint('SCOPE IS NOT NULL', name='DIDS_FOLLOWED_SCOPE_NN'), CheckConstraint('NAME IS NOT NULL', name='DIDS_FOLLOWED_NAME_NN'), @@ -1370,7 +1361,7 @@ class FollowEvents(BASE, ModelBase): scope = Column(InternalScopeString(get_schema_value('SCOPE_LENGTH'))) name = Column(String(get_schema_value('NAME_LENGTH'))) account = Column(InternalAccountString(get_schema_value('ACCOUNT_LENGTH'))) - did_type = Column(DIDType.db_type(name='DIDS_FOLLOWED_EVENTS_TYPE_CHK')) + did_type = Column(Enum(DIDType, name='DIDS_FOLLOWED_EVENTS_TYPE_CHK')) event_type = Column(String(1024)) payload = Column(Text) _table_args = (PrimaryKeyConstraint('scope', 'name', 'account', name='DIDS_FOLLOWED_EVENTS_PK'), diff --git a/lib/rucio/tests/test_abacus_collection_replica.py b/lib/rucio/tests/test_abacus_collection_replica.py index b18d877080..37de35685b 100644 --- a/lib/rucio/tests/test_abacus_collection_replica.py +++ b/lib/rucio/tests/test_abacus_collection_replica.py @@ -20,6 +20,7 @@ # - Eli Chadwick , 2020 # - Benedikt Ziemons , 2020 # - Martin Barisits , 2020 +# - Mario Lassnig , 2020 import os import unittest @@ -85,7 +86,7 @@ def test_abacus_collection_replica_cleanup(self): scope = InternalScope('mock', **self.vo) dataset = 'dataset_%s' % generate_uuid() jdoe = InternalAccount('jdoe', **self.vo) - add_did(scope, dataset, DIDType.from_sym('DATASET'), jdoe) + add_did(scope, dataset, DIDType.DATASET, jdoe) models.CollectionReplica(scope=scope, name=dataset, rse_id=rse_id1, state=ReplicaState.AVAILABLE, bytes=1).save(session=db_session, flush=False) models.CollectionReplica(scope=scope, name=dataset, rse_id=rse_id2, state=ReplicaState.AVAILABLE, bytes=1).save(session=db_session, flush=False) diff --git a/lib/rucio/tests/test_account.py b/lib/rucio/tests/test_account.py index bafdd1401f..5a5cc9da2c 100644 --- a/lib/rucio/tests/test_account.py +++ b/lib/rucio/tests/test_account.py @@ -14,7 +14,7 @@ # limitations under the License. # # Authors: -# - Mario Lassnig , 2012-2017 +# - Mario Lassnig , 2012-2020 # - Thomas Beermann , 2012-2013 # - Angelos Molfetas , 2012 # - Vincent Garonne , 2012-2017 @@ -26,8 +26,6 @@ # - Eli Chadwick , 2020 # - Patrick Austin , 2020 # - Benedikt Ziemons , 2020 -# -# PY3K COMPATIBLE import unittest from json import loads @@ -171,7 +169,7 @@ def test_del_user_success(rest_client, auth_token): response = rest_client.get('/accounts/' + acntusr, headers=headers(auth(auth_token))) assert response.status_code == 200 body = loads(response.get_data(as_text=True)) - assert body['status'] == AccountStatus.DELETED.description # pylint: disable=no-member + assert body['status'] == AccountStatus.DELETED.name def test_del_user_failure(rest_client, auth_token): diff --git a/lib/rucio/tests/test_bb8.py b/lib/rucio/tests/test_bb8.py index d425a9cc5c..e383ac8ab0 100644 --- a/lib/rucio/tests/test_bb8.py +++ b/lib/rucio/tests/test_bb8.py @@ -1,4 +1,5 @@ -# Copyright 2016-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2016-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
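The test updates here, the AccountStatus.DELETED.name assertion above, and the IdentityType lookup further below all lean on standard-library enum semantics instead of the removed DeclEnum helpers (from_sym, .description). For reference, a minimal sketch with an illustrative enum, not taken from the codebase:

    import enum


    class DemoDIDType(enum.Enum):  # illustrative stand-in for DIDType
        DATASET = 'D'
        CONTAINER = 'C'


    # Subscript access looks a member up by name, call syntax by value.
    assert DemoDIDType['DATASET'] is DemoDIDType.DATASET
    assert DemoDIDType('D') is DemoDIDType.DATASET
    # The Enum[type.upper()] idiom normalises user input before the name lookup.
    assert DemoDIDType['dataset'.upper()] is DemoDIDType.DATASET
    # .name replaces the old .description when rendering a member as a string.
    assert DemoDIDType.DATASET.name == 'DATASET'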
@@ -19,6 +20,7 @@ # - Hannes Hansen , 2019 # - Patrick Austin , 2020 # - Benedikt Ziemons , 2020 +# - Mario Lassnig , 2020 import unittest from datetime import datetime @@ -92,7 +94,7 @@ def test_bb8_rebalance_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] diff --git a/lib/rucio/tests/test_dataset_replicas.py b/lib/rucio/tests/test_dataset_replicas.py index ae0e2b65d2..ab876c21ad 100644 --- a/lib/rucio/tests/test_dataset_replicas.py +++ b/lib/rucio/tests/test_dataset_replicas.py @@ -16,14 +16,12 @@ # Authors: # - Vincent Garonne , 2015 # - Cedric Serfon , 2015 -# - Mario Lassnig , 2018 +# - Mario Lassnig , 2018-2020 # - Hannes Hansen , 2018-2019 # - Martin Barisits , 2019 # - Andrew Lister , 2019 # - Patrick Austin , 2020 # - Benedikt Ziemons , 2020 -# -# PY3K COMPATIBLE import unittest @@ -32,6 +30,7 @@ from rucio.client.didclient import DIDClient from rucio.client.replicaclient import ReplicaClient from rucio.client.ruleclient import RuleClient +from rucio.db.sqla.constants import ReplicaState from rucio.common.config import config_get, config_get_bool from rucio.common.exception import InvalidObject from rucio.common.types import InternalAccount, InternalScope @@ -246,7 +245,7 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE # Delete one file replica -> dataset replica should be unavailable delete_replicas(rse_id=self.rse_id, files=[files[0]], session=self.db_session) @@ -257,7 +256,7 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'UNAVAILABLE' + assert dataset_replica['state'] == ReplicaState.UNAVAILABLE # Add one file replica -> dataset replica should be available again add_replicas(rse_id=self.rse_id, files=[files[0]], account=self.account, session=self.db_session) @@ -270,7 +269,7 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE # Delete all file replicas -> dataset replica should be deleted delete_replicas(rse_id=self.rse_id, files=files, session=self.db_session) @@ -295,7 +294,7 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE # Delete first replica on first 
RSE -> replica on first RSE should be unavailable, replica on second RSE should be still available delete_replicas(rse_id=self.rse_id, files=[files[0]], session=self.db_session) @@ -309,13 +308,13 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'UNAVAILABLE' + assert dataset_replica['state'] == ReplicaState.UNAVAILABLE dataset_replica = self.db_session.query(models.CollectionReplica).filter_by(scope=self.scope, name=dataset_name, rse_id=self.rse2_id).one() # pylint: disable=no-member assert dataset_replica['bytes'] == len(files) * file_size assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE # Set the state of the first replica on the second RSE to UNAVAILABLE -> both replicass should be unavailable file_replica = self.db_session.query(models.RSEFileAssociation).filter_by(rse_id=self.rse2_id, scope=self.scope, name=files[0]['name']).one() # pylint: disable=no-member @@ -329,13 +328,13 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'UNAVAILABLE' + assert dataset_replica['state'] == ReplicaState.UNAVAILABLE dataset_replica = self.db_session.query(models.CollectionReplica).filter_by(scope=self.scope, name=dataset_name, rse_id=self.rse2_id).one() # pylint: disable=no-member assert dataset_replica['bytes'] == len(files) * file_size assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'UNAVAILABLE' + assert dataset_replica['state'] == ReplicaState.UNAVAILABLE # Delete first replica on second RSE -> file is not longer part of dataset -> both replicas should be available delete_replicas(rse_id=self.rse2_id, files=[files[0]], session=self.db_session) @@ -348,13 +347,13 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) - 1 assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE dataset_replica = self.db_session.query(models.CollectionReplica).filter_by(scope=self.scope, name=dataset_name, rse_id=self.rse2_id).one() # pylint: disable=no-member assert dataset_replica['bytes'] == (len(files) - 1) * file_size assert dataset_replica['length'] == len(files) - 1 assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE # Add first replica to the first RSE -> first replicas should be available add_replicas(rse_id=self.rse_id, files=[files[0]], account=self.account, session=self.db_session) @@ -368,13 +367,13 @@ def 
test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE dataset_replica = self.db_session.query(models.CollectionReplica).filter_by(scope=self.scope, name=dataset_name, rse_id=self.rse2_id).one() # pylint: disable=no-member assert dataset_replica['bytes'] == len(files) * file_size assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == (len(files) - 1) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - 1 - assert str(dataset_replica['state']) == 'UNAVAILABLE' + assert dataset_replica['state'] == ReplicaState.UNAVAILABLE # Add first replica to the second RSE -> both replicas should be available again add_replicas(rse_id=self.rse2_id, files=[files[0]], account=self.account, session=self.db_session) @@ -387,10 +386,10 @@ def test_update_collection_replica(self): assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE dataset_replica = self.db_session.query(models.CollectionReplica).filter_by(scope=self.scope, name=dataset_name, rse_id=self.rse2_id).one() # pylint: disable=no-member assert dataset_replica['bytes'] == len(files) * file_size assert dataset_replica['length'] == len(files) assert dataset_replica['available_bytes'] == len(files) * file_size assert dataset_replica['available_replicas_cnt'] == len(files) - assert str(dataset_replica['state']) == 'AVAILABLE' + assert dataset_replica['state'] == ReplicaState.AVAILABLE diff --git a/lib/rucio/tests/test_did.py b/lib/rucio/tests/test_did.py index a4466919bb..01cf055536 100644 --- a/lib/rucio/tests/test_did.py +++ b/lib/rucio/tests/test_did.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2013-2020 CERN for the benefit of the ATLAS collaboration. +# Copyright 2013-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
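The assertion changes in these tests are not cosmetic: once the custom __str__ of the old DeclEnum is gone, str() on a plain enum.Enum member yields 'ClassName.MEMBER', so the previous string comparisons would always fail. A minimal sketch with an illustrative enum:

    import enum


    class DemoReplicaState(enum.Enum):  # illustrative stand-in for ReplicaState
        AVAILABLE = 'A'


    state = DemoReplicaState.AVAILABLE
    assert str(state) == 'DemoReplicaState.AVAILABLE'  # no longer just 'AVAILABLE'
    assert state == DemoReplicaState.AVAILABLE         # compare members directly
    assert state.name == 'AVAILABLE' and state.value == 'A'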
@@ -16,7 +16,7 @@ # Authors: # - Vincent Garonne , 2013-2018 # - Martin Barisits , 2013-2020 -# - Mario Lassnig , 2013-2018 +# - Mario Lassnig , 2013-2020 # - Cedric Serfon , 2013-2020 # - Ralph Vigne , 2013 # - Yun-Pin Sun , 2013 @@ -261,7 +261,7 @@ def test_list_new_dids(self): did.add_did(scope=tmp_scope, name='%s-%i' % (tmp_dsn, i), type='DATASET', issuer='root', **self.vo) for i in did.list_new_dids('DATASET', **self.vo): assert i != {} - assert str(i['did_type']) == 'DATASET' + assert i['did_type'] == DIDType.DATASET break for i in did.list_new_dids(**self.vo): assert i != {} diff --git a/lib/rucio/tests/test_import_export.py b/lib/rucio/tests/test_import_export.py index 81e972b59b..4bcf7fb766 100644 --- a/lib/rucio/tests/test_import_export.py +++ b/lib/rucio/tests/test_import_export.py @@ -142,7 +142,7 @@ def check_accounts(self): if identities: for identity in identities: # check identity creation and identity-account association - identity_type = IdentityType.from_sym(identity['type']) + identity_type = IdentityType[identity['type'].upper()] identity = identity['identity'] assert (identity, identity_type) in db_identities accounts_for_identity = list_accounts_for_identity(identity, identity_type) diff --git a/lib/rucio/tests/test_judge_cleaner.py b/lib/rucio/tests/test_judge_cleaner.py index 56a3df9dc2..eb2f66083f 100644 --- a/lib/rucio/tests/test_judge_cleaner.py +++ b/lib/rucio/tests/test_judge_cleaner.py @@ -1,4 +1,5 @@ -# Copyright 2014-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2014-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,6 +20,7 @@ # - Hannes Hansen , 2019 # - Patrick Austin , 2020 # - Benedikt Ziemons , 2020 +# - Mario Lassnig , 2020 import unittest @@ -87,7 +89,7 @@ def test_judge_expire_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=-3, locked=False, subscription_id=None)[0] @@ -105,7 +107,7 @@ def test_judge_expire_rule_with_child_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] diff --git a/lib/rucio/tests/test_judge_evaluator.py b/lib/rucio/tests/test_judge_evaluator.py index f4933529f7..8e72e6ecdf 100644 --- a/lib/rucio/tests/test_judge_evaluator.py +++ b/lib/rucio/tests/test_judge_evaluator.py @@ -1,4 +1,5 @@ -# Copyright 2014-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2014-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,7 +16,7 @@ # Authors: # - Martin Barisits , 2014-2016 # - Vincent Garonne , 2014-2015 -# - Mario Lassnig , 2014-2019 +# - Mario Lassnig , 2014-2020 # - Cedric Serfon , 2019 # - Andrew Lister , 2019 # - Hannes Hansen , 2019 @@ -91,7 +92,7 @@ def test_judge_add_files_to_dataset(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) # Add a first rule to the DS add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -114,11 +115,11 @@ def test_judge_add_dataset_to_container(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) parent_container = 'dataset_' + str(uuid()) - add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, parent_container, DIDType.CONTAINER, self.jdoe) # Add a first rule to the DS add_rule(dids=[{'scope': scope, 'name': parent_container}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None) attach_dids(scope, parent_container, [{'scope': scope, 'name': dataset}], self.jdoe) @@ -141,7 +142,7 @@ def test_account_counter_judge_evaluate_attach(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) # Add a first rule to the DS add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -165,7 +166,7 @@ def test_account_counter_judge_evaluate_detach(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -192,11 +193,11 @@ def test_judge_evaluate_detach_datasetlock(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) container = 'container_' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) # Add a rule to the Container @@ -220,26 +221,26 @@ def test_judge_evaluate_detach(self): scope = InternalScope('mock', **self.vo) container = 'container_' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) 
dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) @@ -263,7 +264,7 @@ def test_judge_add_files_to_dataset_with_2_rules(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) # Add a first rule to the DS add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse5, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -287,15 +288,15 @@ def test_judge_add_files_to_dataset_rule_on_container(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) parent_container = 'dataset_' + str(uuid()) - add_did(scope, parent_container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, parent_container, DIDType.CONTAINER, self.jdoe) attach_dids(scope, parent_container, [{'scope': scope, 'name': dataset}], self.jdoe) parent_parent_container = 'dataset_' + str(uuid()) - add_did(scope, parent_parent_container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, parent_parent_container, DIDType.CONTAINER, self.jdoe) attach_dids(scope, parent_parent_container, [{'scope': scope, 'name': parent_container}], self.jdoe) # Add a first rule to the DS diff --git a/lib/rucio/tests/test_judge_injector.py b/lib/rucio/tests/test_judge_injector.py index 5a7363184b..998bdfcd0a 100644 --- a/lib/rucio/tests/test_judge_injector.py +++ b/lib/rucio/tests/test_judge_injector.py @@ -1,4 +1,5 @@ -# Copyright 2015-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2015-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,11 +16,12 @@ # Authors: # - Martin Barisits , 2015-2019 # - Vincent Garonne , 2015 -# - Joaquin Bogado , 2018 +# - Joaquín Bogado , 2018 # - Andrew Lister , 2019 # - Hannes Hansen , 2019 # - Patrick Austin , 2020 # - Benedikt Ziemons , 2020 +# - Mario Lassnig , 2020 import unittest @@ -91,7 +93,7 @@ def test_judge_inject_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -111,7 +113,7 @@ def test_judge_ask_approval(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -135,7 +137,7 @@ def test_judge_deny_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -151,13 +153,13 @@ def test_add_rule_with_r2d2_container_treating(self): """ JUDGE INJECTOR (CORE): Add a replication rule with an r2d2 container treatment""" scope = InternalScope('mock', **self.vo) container = 'asdf.r2d2_request.2016-04-01-15-00-00.ads.' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) datasets = [] for i in range(3): files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) datasets.append(dataset) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': container}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=900, locked=False, subscription_id=None, ask_approval=True)[0] @@ -174,13 +176,13 @@ def test_add_rule_with_r2d2_container_treating_and_duplicate_rule(self): """ JUDGE INJECTOR (CORE): Add a replication rule with an r2d2 container treatment and duplicate rule""" scope = InternalScope('mock', **self.vo) container = 'asdf.r2d2_request.2016-04-01-15-00-00.ads.' 
+ str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) datasets = [] for i in range(3): files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) datasets.append(dataset) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=900, locked=False, subscription_id=None, ask_approval=False) diff --git a/lib/rucio/tests/test_judge_repairer.py b/lib/rucio/tests/test_judge_repairer.py index e00a262dec..4fefb193da 100644 --- a/lib/rucio/tests/test_judge_repairer.py +++ b/lib/rucio/tests/test_judge_repairer.py @@ -1,4 +1,5 @@ -# Copyright 2014-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2014-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +16,7 @@ # Authors: # - Martin Barisits , 2014-2019 # - Vincent Garonne , 2014-2015 -# - Mario Lassnig , 2014 +# - Mario Lassnig , 2014-2020 # - Andrew Lister , 2019 # - Brandon White , 2019 # - Hannes Hansen , 2019 @@ -101,7 +102,7 @@ def test_to_repair_a_rule_with_NONE_grouping_whose_transfer_failed(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse4_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.T1, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -128,7 +129,7 @@ def test_to_repair_a_rule_with_ALL_grouping_whose_transfer_failed(self): scope = InternalScope('mock', **self.vo) files = create_files(4, scope, self.rse4_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.T1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None, activity='DebugJudge')[0] @@ -152,7 +153,7 @@ def test_to_repair_a_rule_with_DATASET_grouping_whose_transfer_failed(self): scope = InternalScope('mock', **self.vo) files = create_files(4, scope, self.rse4_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, activity='DebugJudge')[0] @@ -174,7 +175,7 @@ def test_repair_a_rule_with_missing_locks(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse4_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, 
DIDType.DATASET, self.jdoe) # Add a first rule to the DS rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -212,7 +213,7 @@ def test_repair_a_rule_with_source_replica_expression(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse4_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -247,7 +248,7 @@ def test_to_repair_a_rule_with_only_1_rse_whose_transfers_failed(self): scope = InternalScope('mock', **self.vo) files = create_files(4, scope, self.rse4_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -276,7 +277,7 @@ def test_to_repair_a_rule_with_NONE_grouping_whose_transfer_failed_and_flipping_ scope = InternalScope('mock', **self.vo) files = create_files(4, scope, self.rse4_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.T1, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -306,7 +307,7 @@ def test_to_repair_a_rule_with_only_1_rse_whose_site_is_blacklisted(self): scope = InternalScope('mock', **self.vo) files = create_files(4, scope, self.rse4_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=rse, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, ignore_availability=True, activity='DebugJudge')[0] diff --git a/lib/rucio/tests/test_oidc.py b/lib/rucio/tests/test_oidc.py index 4c71572a67..1443cd9207 100644 --- a/lib/rucio/tests/test_oidc.py +++ b/lib/rucio/tests/test_oidc.py @@ -1,3 +1,23 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 CERN +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Authors: +# - Jaroslav Guenther , 2020 +# - Patrick Austin , 2020 +# - Benedikt Ziemons , 2020 +# - Mario Lassnig , 2020 # Copyright 2020 CERN for the benefit of the ATLAS collaboration. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -298,11 +318,11 @@ def setUp(self): self.adminClientSUB = str('adminclientSUB' + rndstr()).lower() self.adminClientSUB_otherISS = str('adminclientSUB_otherISS' + rndstr()).lower() try: - add_account(self.account, AccountType.from_sym('USER'), 'rucio@email.com', session=self.db_session) + add_account(self.account, AccountType.USER, 'rucio@email.com', session=self.db_session) except Duplicate: pass try: - add_account(self.adminaccount, AccountType.from_sym('SERVICE'), 'rucio@email.com', session=self.db_session) + add_account(self.adminaccount, AccountType.SERVICE, 'rucio@email.com', session=self.db_session) except Duplicate: pass diff --git a/lib/rucio/tests/test_replica.py b/lib/rucio/tests/test_replica.py index df50e53653..deb95875fc 100644 --- a/lib/rucio/tests/test_replica.py +++ b/lib/rucio/tests/test_replica.py @@ -16,7 +16,7 @@ # Authors: # - Vincent Garonne , 2013-2018 # - Ralph Vigne , 2013-2014 -# - Mario Lassnig , 2013-2018 +# - Mario Lassnig , 2013-2020 # - Cedric Serfon , 2014-2018 # - Thomas Beermann , 2014 # - Martin Barisits , 2015-2019 @@ -64,7 +64,7 @@ from rucio.daemons.badreplicas.minos import run as minos_run from rucio.daemons.badreplicas.minos_temporary_expiration import run as minos_temp_run from rucio.daemons.badreplicas.necromancer import run as necromancer_run -from rucio.db.sqla.constants import DIDType, ReplicaState, OBSOLETE +from rucio.db.sqla.constants import DIDType, ReplicaState, BadPFNStatus, OBSOLETE from rucio.rse import rsemanager as rsemgr from rucio.tests.common import execute, rse_name_generator, headers, auth, Mime, accept @@ -298,8 +298,8 @@ def test_list_replicas_all_states(self): replica_cpt = 0 for replica in list_replicas(dids=[{'scope': f['scope'], 'name': f['name'], 'type': DIDType.FILE} for f in files], schemes=['srm'], all_states=True): assert 'states' in replica - assert replica['states'][rses[0]] == str(ReplicaState.COPYING) - assert replica['states'][rses[1]] == str(ReplicaState.AVAILABLE) + assert replica['states'][rses[0]] == str(ReplicaState.COPYING.name) + assert replica['states'][rses[1]] == str(ReplicaState.AVAILABLE.name) replica_cpt += 1 assert nbfiles == replica_cpt @@ -860,7 +860,7 @@ def test_add_temporary_unavailable_pfns(vo, replica_client): for pfn in list_rep: pfn = str(clean_surls([pfn])[0]) assert pfn in bad_pfns - assert str(bad_pfns[pfn][0]) == 'TEMPORARY_UNAVAILABLE' + assert bad_pfns[pfn][0] == BadPFNStatus.TEMPORARY_UNAVAILABLE assert bad_pfns[pfn][1] == reason_str # Submit with wrong state @@ -880,11 +880,11 @@ def test_add_temporary_unavailable_pfns(vo, replica_client): # Check the state in the replica table for did in files: rep = get_replicas_state(scope=InternalScope(did['scope'], vo=vo), name=did['name']) - assert str(list(rep.keys())[0]) == 'TEMPORARY_UNAVAILABLE' + assert list(rep.keys())[0] == ReplicaState.TEMPORARY_UNAVAILABLE rep = [] for did in files: - did['state'] = ReplicaState.from_sym('TEMPORARY_UNAVAILABLE') + did['state'] = ReplicaState.TEMPORARY_UNAVAILABLE rep.append(did) # Run the minos expiration @@ -892,7 +892,7 @@ def test_add_temporary_unavailable_pfns(vo, replica_client): # Check the state in the replica table for did in files: rep = get_replicas_state(scope=InternalScope(did['scope'], vo=vo), name=did['name']) - assert str(list(rep.keys())[0]) == 'AVAILABLE' + assert list(rep.keys())[0] == ReplicaState.AVAILABLE def test_set_tombstone2(vo, replica_client): diff --git a/lib/rucio/tests/test_rule.py 
b/lib/rucio/tests/test_rule.py index a804b2c803..ab967dab7a 100644 --- a/lib/rucio/tests/test_rule.py +++ b/lib/rucio/tests/test_rule.py @@ -1,4 +1,5 @@ -# Copyright 2013-2020 CERN for the benefit of the ATLAS collaboration. +# -*- coding: utf-8 -*- +# Copyright 2013-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,10 +15,10 @@ # # Authors: # - Martin Barisits , 2013-2020 -# - Mario Lassnig , 2013-2017 +# - Mario Lassnig , 2013-2020 # - Vincent Garonne , 2013-2015 # - Cedric Serfon , 2015-2019 -# - Joaquin Bogado , 2018 +# - Joaquín Bogado , 2018 # - Hannes Hansen , 2019 # - Robert Illingworth , 2019 # - Andrew Lister , 2019 @@ -26,8 +27,6 @@ # - Patrick Austin , 2020 # - Jaroslav Guenther , 2020 # - Benedikt Ziemons , 2020 -# -# PY3K COMPATIBLE import json import random @@ -207,7 +206,7 @@ def test_add_rule_dataset_none(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -228,7 +227,7 @@ def test_add_rule_duplicate(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) # Add a first rule to the DS @@ -242,12 +241,12 @@ def test_add_rules_datasets_none(self): scope = InternalScope('mock', **self.vo) files1 = create_files(3, scope, self.rse4_id) dataset1 = 'dataset_' + str(uuid()) - add_did(scope, dataset1, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset1, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset1, files1, self.jdoe) files2 = create_files(3, scope, self.rse4_id) dataset2 = 'dataset_' + str(uuid()) - add_did(scope, dataset2, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset2, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset2, files2, self.jdoe) # Add the rules to both DS @@ -282,13 +281,13 @@ def test_add_rule_container_none(self): """ REPLICATION RULE (CORE): Add a replication rule on a container, NONE Grouping""" scope = InternalScope('mock', **self.vo) container = 'container_' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) all_files = [] for i in range(3): files = create_files(3, scope, self.rse1_id) all_files.extend(files) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) @@ -303,7 +302,7 @@ def test_add_rule_dataset_all(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -327,13 +326,13 @@ def 
test_add_rule_container_all(self): """ REPLICATION RULE (CORE): Add a replication rule on a container, ALL Grouping""" scope = InternalScope('mock', **self.vo) container = 'container_' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) all_files = [] for i in range(3): files = create_files(3, scope, self.rse1_id) all_files.extend(files) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) @@ -353,7 +352,7 @@ def test_add_rule_requests(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -378,7 +377,7 @@ def test_add_rule_dataset_dataset(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse5, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -390,14 +389,14 @@ def test_add_rule_container_dataset(self): """ REPLICATION RULE (CORE): Add a replication rule on a container, DATASET Grouping""" scope = InternalScope('mock', **self.vo) container = 'container_' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) all_files = [] dataset_files = [] for i in range(3): files = create_files(3, scope, self.rse1_id) all_files.extend(files) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) dataset_files.append({'scope': scope, 'name': dataset, 'files': files}) @@ -419,7 +418,7 @@ def test_add_rule_dataset_none_with_weights(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='NONE', weight="fakeweight", lifetime=None, locked=False, subscription_id=None) @@ -435,14 +434,14 @@ def test_add_rule_container_dataset_with_weights(self): """ REPLICATION RULE (CORE): Add a replication rule on a container, DATASET Grouping, WEIGHTS""" scope = InternalScope('mock', **self.vo) container = 'container_' + str(uuid()) - add_did(scope, container, DIDType.from_sym('CONTAINER'), self.jdoe) + add_did(scope, container, DIDType.CONTAINER, self.jdoe) all_files 
= [] dataset_files = [] for i in range(3): files = create_files(3, scope, self.rse1_id) all_files.extend(files) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) attach_dids(scope, container, [{'scope': scope, 'name': dataset}], self.jdoe) dataset_files.append({'scope': scope, 'name': dataset, 'files': files}) @@ -465,7 +464,7 @@ def test_get_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] @@ -477,7 +476,7 @@ def test_delete_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='DATASET', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] @@ -492,7 +491,7 @@ def test_delete_rule_and_cancel_transfers(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] @@ -512,7 +511,7 @@ def test_locked_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=True, subscription_id=None)[0] @@ -530,7 +529,7 @@ def test_account_counter_rule_create(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -547,7 +546,7 @@ def test_account_counter_rule_delete(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, 
self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -569,7 +568,7 @@ def test_account_counter_rule_update(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -596,7 +595,7 @@ def test_rse_counter_unavailable_replicas(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse3, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None) @@ -614,7 +613,7 @@ def test_rule_add_fails_account_local_limit(self): files = create_files(3, scope, self.rse3_id, bytes=100) # local quota dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) set_local_account_limit(account=self.jdoe, rse_id=self.rse3_id, bytes=5) @@ -627,7 +626,7 @@ def test_rule_add_fails_account_global_limit(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse3_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) set_local_account_limit(account=self.jdoe, rse_id=self.rse3_id, bytes=400) @@ -646,7 +645,7 @@ def test_rule_add_fails_rse_limit(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) set_rse_limits(self.rse3_id, 'MaxSpaceAvailable', 250) @@ -663,7 +662,7 @@ def test_dataset_callback(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) set_status(scope=scope, name=dataset, open=False) @@ -683,7 +682,7 @@ def test_dataset_callback_no(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) set_status(scope=scope, name=dataset, open=False) @@ -702,7 +701,7 @@ def test_dataset_callback_close_late(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, 
bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, notify='C')[0] @@ -722,7 +721,7 @@ def test_dataset_callback_with_evaluator(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, notify='C')[0] @@ -746,7 +745,7 @@ def test_rule_progress_callback_with_evaluator(self): scope = InternalScope('mock', **self.vo) files = create_files(30, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None, notify='P')[0] @@ -802,7 +801,7 @@ def test_add_rule_with_purge(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse4, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None, purge_replicas=True)[0] @@ -824,7 +823,7 @@ def test_add_rule_with_ignore_availability(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) with pytest.raises(RSEBlacklisted): @@ -849,7 +848,7 @@ def test_delete_rule_country_admin(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=rse, grouping='NONE', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -868,7 +867,7 @@ def test_reduce_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, [self.rse1_id, self.rse3_id]) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.rse1 + '|' + self.rse3, grouping='DATASET', weight=None, lifetime=None, locked=False, 
subscription_id=None)[0] @@ -883,7 +882,7 @@ def test_reduce_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, [self.rse1_id, self.rse3_id]) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.rse1 + '|' + self.rse3 + '|' + self.rse5, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -896,7 +895,7 @@ def test_move_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, [self.rse1_id]) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -922,7 +921,7 @@ def test_add_rule_with_scratchdisk(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression='%s' % rse, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -939,7 +938,7 @@ def test_add_rule_with_auto_approval(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=200) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) set_status(scope=scope, name=dataset, open=False) @@ -970,7 +969,7 @@ def test_add_rule_with_manual_approval_block(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) with pytest.raises(ManualRuleApprovalBlocked): @@ -982,9 +981,9 @@ def test_update_rule_child_rule(self): files = create_files(3, scope, self.rse1_id) dataset1 = 'dataset_' + str(uuid()) dataset2 = 'dataset_' + str(uuid()) - add_did(scope, dataset1, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset1, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset1, files, self.jdoe) - add_did(scope, dataset2, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset2, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset2, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset1}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -1002,7 +1001,7 @@ def test_release_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id, bytes=100) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) 
attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight=None, lifetime=None, locked=False, subscription_id=None)[0] @@ -1027,7 +1026,7 @@ def test_metadata__rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=2, rse_expression=self.T1, grouping='NONE', @@ -1218,7 +1217,7 @@ def test_add_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) ret = self.rule_client.add_replication_rule(dids=[{'scope': scope.external, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE') @@ -1233,7 +1232,7 @@ def test_delete_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] @@ -1248,7 +1247,7 @@ def test_list_rules_by_did(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=False, subscription_id=None)[0] @@ -1266,7 +1265,7 @@ def test_get_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) ret = self.rule_client.add_replication_rule(dids=[{'scope': scope.external, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE') @@ -1278,7 +1277,7 @@ def test_get_rule_by_account(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) ret = self.rule_client.add_replication_rule(dids=[{'scope': scope.external, 'name': dataset}], account='jdoe', copies=2, rse_expression=self.T1, grouping='NONE') @@ -1292,7 +1291,7 @@ def test_locked_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), 
self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='NONE', weight='fakeweight', lifetime=None, locked=True, subscription_id=None)[0] @@ -1306,7 +1305,7 @@ def test_dataset_lock(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight='fakeweight', lifetime=None, locked=True, subscription_id=None)[0] @@ -1319,7 +1318,7 @@ def test_change_rule_lifetime(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id_1 = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight='fakeweight', lifetime=150, locked=True, subscription_id=None)[0] @@ -1337,12 +1336,12 @@ def test_approve_rule(self): scope = InternalScope('mock', **self.vo) files = create_files(3, scope, self.rse1_id) dataset = 'dataset_' + str(uuid()) - add_did(scope, dataset, DIDType.from_sym('DATASET'), self.jdoe) + add_did(scope, dataset, DIDType.DATASET, self.jdoe) attach_dids(scope, dataset, files, self.jdoe) rule_id = add_rule(dids=[{'scope': scope, 'name': dataset}], account=self.jdoe, copies=1, rse_expression=self.rse1, grouping='DATASET', weight='fakeweight', lifetime=150, locked=True, subscription_id=None, ask_approval=True)[0] rule = self.rule_client.get_replication_rule(rule_id) - assert rule['state'] == str(RuleState.WAITING_APPROVAL) + assert rule['state'] == RuleState.WAITING_APPROVAL.name self.rule_client.approve_replication_rule(rule_id) rule = self.rule_client.get_replication_rule(rule_id) - assert rule['state'] == str(RuleState.INJECT) + assert rule['state'] == RuleState.INJECT.name diff --git a/lib/rucio/web/rest/flaskapi/v1/replica.py b/lib/rucio/web/rest/flaskapi/v1/replica.py index 0e0b0268c1..4a0a7c5a7e 100644 --- a/lib/rucio/web/rest/flaskapi/v1/replica.py +++ b/lib/rucio/web/rest/flaskapi/v1/replica.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2013-2020 CERN +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,11 +14,10 @@ # limitations under the License. 
# # Authors: -# - Vincent Garonne , 2013-2017 -# - Mario Lassnig , 2013-2018 -# - Cedric Serfon , 2014-2019 -# - Thomas Beermann , 2014-2018 +# - Thomas Beermann , 2018 +# - Mario Lassnig , 2018-2020 # - Hannes Hansen , 2018-2019 +# - Cedric Serfon , 2018-2019 # - Martin Barisits , 2019-2020 # - James Perry , 2019 # - Andrew Lister , 2019 @@ -719,7 +718,7 @@ def get(self): if 'state' in params: state = params['state'][0] if isinstance(state, string_types): - state = BadFilesStatus.from_string(state) + state = BadFilesStatus(state) if 'rse' in params: rse = params['rse'][0] if 'younger_than' in params: diff --git a/lib/rucio/web/rest/flaskapi/v1/request.py b/lib/rucio/web/rest/flaskapi/v1/request.py index 56d6108498..f8f7cde008 100644 --- a/lib/rucio/web/rest/flaskapi/v1/request.py +++ b/lib/rucio/web/rest/flaskapi/v1/request.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2014-2020 CERN +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,9 +14,8 @@ # limitations under the License. # # Authors: -# - Mario Lassnig , 2014-2018 -# - Vincent Garonne , 2017 # - Thomas Beermann , 2018 +# - Mario Lassnig , 2018-2020 # - Hannes Hansen , 2018-2019 # - Andrew Lister , 2019 # - Eli Chadwick , 2020 @@ -111,7 +110,7 @@ def get(self): return generate_http_error_flask(400, 'MissingParameter', 'Source site is missing') try: - states = [RequestState.from_string(state) for state in request_states.split(',')] + states = [RequestState(state) for state in request_states.split(',')] except ValueError: return generate_http_error_flask(400, 'Invalid', 'Request state value is invalid') diff --git a/lib/rucio/web/rest/webpy/v1/replica.py b/lib/rucio/web/rest/webpy/v1/replica.py index 5eab3716bb..47ab22bc9d 100755 --- a/lib/rucio/web/rest/webpy/v1/replica.py +++ b/lib/rucio/web/rest/webpy/v1/replica.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2013-2020 CERN +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,13 +15,11 @@ # limitations under the License. # # Authors: -# - Vincent Garonne , 2013-2017 -# - Mario Lassnig , 2013-2019 -# - Ralph Vigne , 2013 -# - Cedric Serfon , 2014-2019 # - Thomas Beermann , 2018-2020 -# - Martin Barisits , 2018-2019 +# - Martin Barisits , 2018-2020 +# - Mario Lassnig , 2018-2020 # - Hannes Hansen , 2018-2019 +# - Cedric Serfon , 2018-2019 # - James Perry , 2019-2020 # - Ilija Vukotic , 2020 # - Luc Goossens , 2020 @@ -696,7 +694,7 @@ def GET(self): if 'state' in params: state = params['state'][0] if isinstance(state, string_types): - state = BadFilesStatus.from_string(state) + state = BadFilesStatus(state) if 'rse' in params: rse = params['rse'][0] if 'younger_than' in params: diff --git a/lib/rucio/web/rest/webpy/v1/request.py b/lib/rucio/web/rest/webpy/v1/request.py index ec99d60b7a..b5bbd8faf0 100755 --- a/lib/rucio/web/rest/webpy/v1/request.py +++ b/lib/rucio/web/rest/webpy/v1/request.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2014-2020 CERN +# Copyright 2018-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,10 +15,9 @@ # limitations under the License.
# # Authors: -# - Mario Lassnig , 2014-2018 -# - Vincent Garonne , 2017 -# - Hannes Hansen , 2018-2019 # - Thomas Beermann , 2018-2020 +# - Mario Lassnig , 2018-2020 +# - Hannes Hansen , 2018-2019 # - dciangot , 2018 # - Andrew Lister , 2019 # - Eli Chadwick , 2020 @@ -124,7 +123,7 @@ def GET(self): raise generate_http_error(400, 'MissingParameter', 'Source site is missing') try: - states = [RequestState.from_string(state) for state in request_states.split(',')] + states = [RequestState(state) for state in request_states.split(',')] except ValueError: raise generate_http_error(400, 'Invalid', 'Request state value is invalid') diff --git a/lib/rucio/web/ui/common/utils.py b/lib/rucio/web/ui/common/utils.py index 851f1476b3..a91e24d46b 100644 --- a/lib/rucio/web/ui/common/utils.py +++ b/lib/rucio/web/ui/common/utils.py @@ -16,7 +16,7 @@ # # Authors: # - Thomas Beermann , 2014-2020 -# - Mario Lassnig , 2014-2018 +# - Mario Lassnig , 2014-2020 # - Vincent Garonne , 2015 # - Martin Barisits , 2016-2020 # - Ruturaj Gujar , 2019 @@ -167,7 +167,7 @@ def select_account_name(identitystr, identity_type, vo=None): if vo is not None: accounts = identity.list_accounts_for_identity(identitystr, identity_type) else: - internal_accounts = identity_core.list_accounts_for_identity(identitystr, IdentityType.from_sym(identity_type)) + internal_accounts = identity_core.list_accounts_for_identity(identitystr, IdentityType[identity_type]) accounts = [account.external for account in internal_accounts] vos = [account.vo for account in internal_accounts] if vos: diff --git a/requirements.readthedocs.txt b/requirements.readthedocs.txt index 2d8d7b2ead..a5d19f2ddc 100644 --- a/requirements.readthedocs.txt +++ b/requirements.readthedocs.txt @@ -1,7 +1,7 @@ # All dependencies needed to run rucio should be defined here -SQLAlchemy==1.3.7 # DB backend -alembic==1.4.1 # Lightweight database migration tool for SQLAlchemy +SQLAlchemy==1.3.20 # DB backend +alembic==1.4.3 # Lightweight database migration tool for SQLAlchemy web.py==0.39; python_version <= '2.7' # Python web framework for Python2 web.py==0.40; python_version > '2.7' # Python web framework for Python3 python-memcached==1.59; python_version <= '2.7' # Quick and small memcached client for Python2 diff --git a/tools/bootstrap_tests.py b/tools/bootstrap_tests.py index b73b547515..a9de25c826 100755 --- a/tools/bootstrap_tests.py +++ b/tools/bootstrap_tests.py @@ -1,20 +1,30 @@ #!/usr/bin/env python3 -# Copyright European Organization for Nuclear Research (CERN) +# -*- coding: utf-8 -*- +# Copyright 2013-2020 CERN # # Licensed under the Apache License, Version 2.0 (the "License"); -# You may not use this file except in compliance with the License. +# you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# # Authors: -# - Vincent Garonne, , 2013 -# - Mario Lassnig, , 2014 -# - Martin Barisits, , 2017 -# - Thomas Beermann, , 2017 -# - Cedric Serfon, , 2019 +# - Vincent Garonne , 2013-2015 +# - Mario Lassnig , 2014-2020 +# - Evangelia Liotiri , 2014 +# - Martin Barisits , 2014-2017 +# - Thomas Beermann , 2017 +# - Stefan Prenner , 2017-2018 +# - Cedric Serfon , 2019 +# - Andrew Lister , 2019 +# - Eli Chadwick , 2020 # - Patrick Austin , 2020 -# -# PY3K COMPATIBLE from rucio.api.vo import add_vo from rucio.client import Client @@ -66,7 +76,6 @@ ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=barisits/CN=692443/CN=Martin Barisits', 'x509', 'martin.barisits@cern.ch'), ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=tbeerman/CN=722011/CN=Thomas Beermann', 'x509', 'thomas.beermann@cern.ch'), ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ruciobuildbot/CN=692443/CN=Robot: Rucio build bot', 'x509', 'rucio.build.bot@cern.ch'), - ('/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=sprenner/CN=822876/CN=Stefan Prenner', 'x509', 'stefan.prenner@cern.ch'), ('/CN=docker client', 'x509', 'dummy@cern.ch'), ('mlassnig@CERN.CH', 'GSS', 'mario.lassnig@cern.ch')] diff --git a/tools/run_tests_docker.sh b/tools/run_tests_docker.sh index b0e5c2a07e..e0403e94e5 100755 --- a/tools/run_tests_docker.sh +++ b/tools/run_tests_docker.sh @@ -17,7 +17,7 @@ # - Thomas Beermann , 2017-2018 # - Vincent Garonne , 2018 # - Hannes Hansen , 2018-2019 -# - Mario Lassnig , 2019 +# - Mario Lassnig , 2019-2020 # - Martin Barisits , 2019 # - Andrew Lister , 2019 # - Patrick Austin , 2020 @@ -31,20 +31,24 @@ function usage { echo "Usage: $0 [OPTION]..." echo 'Run Rucio test suite' echo '' - echo ' -h Show usage.' - echo ' -i Do only the initialization.' + echo ' -h Show usage' + echo ' -i Do only the initialization' echo ' -r Activate default RSEs (XRD1, XRD2, XRD3)' echo ' -s Run special tests for Dirac. Includes using BelleII schema' + echo ' -t Less verbose output from pytest' + echo ' -a Skip alembic downgrade/upgrade test' exit } -while getopts hirs opt +while getopts hirsta opt do case "$opt" in h) usage;; i) init_only="true";; r) activate_rse="true";; s) special="true";; + t) notrace="true";; + a) noalembic="true";; esac done export RUCIO_HOME=/opt/etc/test @@ -73,7 +77,7 @@ if test ${special}; then ln -s /opt/rucio/etc/rucio.cfg.special /opt/rucio/etc/rucio.cfg else if [ -f /opt/rucio/etc/rucio.cfg ]; then - echo 'Using the standard conig' + echo 'Using the standard config' else echo 'rucio.cfg not found. Will try to do a symlink' ln -s /opt/rucio/etc/rucio.cfg.default /opt/rucio/etc/rucio.cfg @@ -92,14 +96,18 @@ if [ -f /tmp/rucio.db ]; then chmod 777 /tmp/rucio.db fi -echo "Running full alembic migration" -ALEMBIC_CONFIG="/opt/rucio/etc/alembic.ini" tools/alembic_migration.sh -if [ $? != 0 ]; then - echo 'Failed to run alembic migration!' - exit 1 +if test ${noalembic}; then + echo "Skipping alembic migration" +else + echo "Running full alembic migration" + ALEMBIC_CONFIG="/opt/rucio/etc/alembic.ini" tools/alembic_migration.sh + if [ $? != 0 ]; then + echo 'Failed to run alembic migration!' + exit 1 + fi fi -echo 'Bootstrap tests: Create jdoe account/mock scope' +echo 'Bootstrapping tests' tools/bootstrap_tests.py if [ $? != 0 ]; then echo 'Failed to bootstrap!' @@ -128,13 +136,6 @@ if [ $? != 0 ]; then exit 1 fi -echo 'Bootstrap tests: Create jdoe account/mock scope' -tools/bootstrap_tests.py -if [ $? != 0 ]; then - echo 'Failed to bootstrap!' 
- exit 1 -fi - if test ${activate_rse}; then echo 'Activating default RSEs (XRD1, XRD2, XRD3)' tools/docker_activate_rses.sh
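The mechanical change running through this patch is the retirement of the custom enum helpers (from_sym, from_string, and string comparison via str(member)) in favor of the standard-library enum protocol: subscripting by member name, calling with a member value, and .name for serialization. A minimal sketch of the three access patterns, using a hypothetical State enum rather than any of Rucio's actual classes:

    from enum import Enum

    class State(Enum):
        # Symbolic member names mapped to the short values
        # that would be stored in a database column.
        AVAILABLE = 'A'
        COPYING = 'C'

    # Name-based lookup, the replacement for from_sym('...')-style
    # calls; an unknown name raises KeyError.
    assert State['COPYING'] is State.COPYING

    # Value-based lookup, the replacement for from_string(...)-style
    # calls; an unknown value raises ValueError.
    assert State('A') is State.AVAILABLE

    # Serialization for API responses: .name gives 'COPYING', whereas
    # str(State.COPYING) would give 'State.COPYING'.
    assert State.COPYING.name == 'COPYING'

One practical consequence, visible in the REST handlers above: Enum[name] raises KeyError while Enum(value) raises ValueError, so each try/except guard has to match the lookup style it wraps.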