From 9f26eb417dcdcfc52b2d146547a711154c7d6980 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Wed, 12 Jun 2024 15:43:03 +0200 Subject: [PATCH 01/57] removed UUIDS from Graph in borg --- src/easyscience/Objects/Graph.py | 131 ++++++++++--------------------- 1 file changed, 42 insertions(+), 89 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 2ca59309..0b910e07 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -73,24 +73,14 @@ def is_returned(self) -> bool: return "returned" in self._type -class UniqueIdMap(WeakKeyDictionary): - def __init__(self, this_dict: dict = None): - super().__init__(self) - # replace data with a defaultdict to generate uuids - self.data = defaultdict(uuid4) - if this_dict is not None: - self.update(this_dict) - - -uniqueidmap = UniqueIdMap() - - class Graph: def __init__(self): + # A dictionary of object names and their corresponding objects self._store = weakref.WeakValueDictionary() + # A dict with object names as keys and a list of their object types as values, with weak references self.__graph_dict = {} - def vertices(self) -> List[int]: + def vertices(self) -> List[str]: """returns the vertices of a graph""" return list(self._store.keys()) @@ -99,63 +89,69 @@ def edges(self): return self.__generate_edges() @property - def argument_objs(self) -> List[int]: + def argument_objs(self) -> List[str]: return self._nested_get("argument") @property - def created_objs(self) -> List[int]: + def created_objs(self) -> List[str]: return self._nested_get("created") @property - def created_internal(self) -> List[int]: + def created_internal(self) -> List[str]: return self._nested_get("created_internal") @property - def returned_objs(self) -> List[int]: + def returned_objs(self) -> List[str]: return self._nested_get("returned") - def get_item_by_key(self, item_id: int) -> object: + def _nested_get(self, obj_type: str) -> List[str]: + """Access a nested object in root by key sequence.""" + extracted_list = [] + for key, item in self.__graph_dict.items(): + if obj_type in item.type: + extracted_list.append(key) + return extracted_list + + + def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): return self._store[item_id] raise ValueError def is_known(self, vertex: object) -> bool: - return self.convert_id(vertex).int in self._store.keys() + # All objects should have a 'name' attribute + return vertex.name in self._store.keys() def find_type(self, vertex: object) -> List[str]: if self.is_known(vertex): - oid = self.convert_id(vertex) - return self.__graph_dict[oid].type + return self.__graph_dict[vertex.name].type def reset_type(self, obj, default_type: str): - if self.convert_id(obj).int in self.__graph_dict.keys(): - self.__graph_dict[self.convert_id(obj).int].reset_type(default_type) + if obj.name in self.__graph_dict.keys(): + self.__graph_dict[obj.name].reset_type(default_type) def change_type(self, obj, new_type: str): - if self.convert_id(obj).int in self.__graph_dict.keys(): - self.__graph_dict[self.convert_id(obj).int].type = new_type + if obj.name in self.__graph_dict.keys(): + self.__graph_dict[obj.name].type = new_type def add_vertex(self, obj: object, obj_type: str = None): - oid = self.convert_id(obj).int - self._store[oid] = obj - self.__graph_dict[oid] = _EntryList() # Enhanced list of keys - self.__graph_dict[oid].finalizer = weakref.finalize( - self._store[oid], self.prune, oid + name = obj.name + self._store[name] = obj + 
self.__graph_dict[name] = _EntryList() # Add objects type to the list of types + self.__graph_dict[name].finalizer = weakref.finalize( + self._store[name], self.prune, name ) - self.__graph_dict[oid].type = obj_type + self.__graph_dict[name].type = obj_type def add_edge(self, start_obj: object, end_obj: object): - vertex1 = self.convert_id(start_obj).int - vertex2 = self.convert_id(end_obj).int - if vertex1 in self.__graph_dict.keys(): - self.__graph_dict[vertex1].append(vertex2) + if start_obj.name in self.__graph_dict.keys(): + self.__graph_dict[start_obj.name].append(end_obj.name) else: raise AttributeError def get_edges(self, start_obj) -> List[str]: - vertex1 = self.convert_id(start_obj).int - if vertex1 in self.__graph_dict.keys(): - return list(self.__graph_dict[vertex1]) + if start_obj.name in self.__graph_dict.keys(): + return list(self.__graph_dict[start_obj.name]) else: raise AttributeError @@ -173,10 +169,10 @@ def __generate_edges(self) -> list: return edges def prune_vertex_from_edge(self, parent_obj, child_obj): - vertex1 = self.convert_id(parent_obj).int + vertex1 = parent_obj.name if child_obj is None: return - vertex2 = self.convert_id(child_obj).int + vertex2 = child_obj.name if ( vertex1 in self.__graph_dict.keys() @@ -184,7 +180,7 @@ def prune_vertex_from_edge(self, parent_obj, child_obj): ): del self.__graph_dict[vertex1][self.__graph_dict[vertex1].index(vertex2)] - def prune(self, key: int): + def prune(self, key: str): if key in self.__graph_dict.keys(): del self.__graph_dict[key] @@ -203,8 +199,8 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: in graph""" try: - start_vertex = self.convert_id(start_obj).int - end_vertex = self.convert_id(end_obj).int + start_vertex = start_obj.name + end_vertex = end_obj.name except TypeError: start_vertex = start_obj end_vertex = end_obj @@ -226,8 +222,8 @@ def find_all_paths(self, start_obj, end_obj, path=[]) -> list: """find all paths from start_vertex to end_vertex in graph""" - start_vertex = self.convert_id(start_obj).int - end_vertex = self.convert_id(end_obj).int + start_vertex = start_obj.name + end_vertex = end_obj.name graph = self.__graph_dict path = path + [start_vertex] @@ -254,7 +250,7 @@ def reverse_route(self, end_obj, start_obj=None) -> List: :return: :rtype: """ - end_vertex = self.convert_id(end_obj).int + end_vertex = end_obj.name path_length = sys.maxsize optimum_path = [] @@ -291,49 +287,6 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return True return False - def _nested_get(self, obj_type: str) -> List[int]: - """Access a nested object in root by key sequence.""" - extracted_list = [] - for key, item in self.__graph_dict.items(): - if obj_type in item.type: - extracted_list.append(key) - return extracted_list - - @staticmethod - def convert_id(input_value) -> UUID: - """Sometimes we're dopy and""" - if not validate_id(input_value): - input_value = unique_id(input_value) - return input_value - - @staticmethod - def convert_id_to_key(input_value: Union[object, UUID]) -> int: - """Sometimes we're dopy and""" - if not validate_id(input_value): - input_value: UUID = unique_id(input_value) - return input_value.int - def __repr__(self) -> str: return f"Graph object of {len(self._store)} vertices." - -def unique_id(obj) -> UUID: - """Produce a unique integer id for the object. - - Object must me *hashable*. Id is a UUID and should be unique - across Python invocations. 
- - """ - return uniqueidmap[obj] - - -def validate_id(potential_id) -> bool: - test = True - try: - if isinstance(potential_id, UUID): - UUID(str(potential_id), version=4) - else: - UUID(potential_id, version=4) - except (ValueError, AttributeError): - test = False - return test From 5bb1f7f2c89316c5e2b1e609128a55eee5636657 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 13:02:12 +0200 Subject: [PATCH 02/57] replace convert_id calls with .name --- src/easyscience/Fitting/Constraints.py | 15 +++-------- src/easyscience/Fitting/fitting_template.py | 7 ++---- src/easyscience/Objects/Graph.py | 6 ----- src/easyscience/Objects/Groups.py | 6 ++--- src/easyscience/Objects/virtual.py | 18 ++++++------- src/easyscience/Utils/Hugger/Property.py | 28 ++++++++++----------- src/easyscience/Utils/classTools.py | 4 +-- src/easyscience/Utils/io/template.py | 2 +- tests/integration_tests/test_undoRedo.py | 4 +-- tests/unit_tests/Fitting/test_fitting.py | 4 +-- tests/unit_tests/Objects/test_Groups.py | 8 +++--- tests/unit_tests/Objects/test_Virtual.py | 2 +- 12 files changed, 43 insertions(+), 61 deletions(-) diff --git a/src/easyscience/Fitting/Constraints.py b/src/easyscience/Fitting/Constraints.py index c2405c32..96a4778b 100644 --- a/src/easyscience/Fitting/Constraints.py +++ b/src/easyscience/Fitting/Constraints.py @@ -43,18 +43,18 @@ def __init__( value: Optional[Number] = None, ): self.aeval = Interpreter() - self.dependent_obj_ids = self.get_key(dependent_obj) + self.dependent_obj_ids = dependent_obj.name self.independent_obj_ids = None self._enabled = True self.external = False self._finalizer = None if independent_obj is not None: if isinstance(independent_obj, list): - self.independent_obj_ids = [self.get_key(obj) for obj in independent_obj] + self.independent_obj_ids = [obj.name for obj in independent_obj] if self.dependent_obj_ids in self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') else: - self.independent_obj_ids = self.get_key(independent_obj) + self.independent_obj_ids = independent_obj.name if self.dependent_obj_ids == self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') # Test if dependent is a parameter or a descriptor. 
@@ -147,15 +147,6 @@ def _parse_operator(self, obj: V, *args, **kwargs) -> Number: def __repr__(self): pass - def get_key(self, obj) -> int: - """ - Get the unique key of a EasyScience object - - :param obj: EasyScience object - :return: key for EasyScience object - """ - return self._borg.map.convert_id_to_key(obj) - def get_obj(self, key: int) -> V: """ Get an EasyScience object from its unique key diff --git a/src/easyscience/Fitting/fitting_template.py b/src/easyscience/Fitting/fitting_template.py index 4e10e399..947f485e 100644 --- a/src/easyscience/Fitting/fitting_template.py +++ b/src/easyscience/Fitting/fitting_template.py @@ -251,15 +251,12 @@ def __init__(self): self._borg = borg - def get_name_from_key(self, item_key: int) -> str: + def get_name_from_key(self, item_key: str) -> str: return getattr(self._borg.map.get_item_by_key(item_key), 'name', '') - def get_item_from_key(self, item_key: int) -> object: + def get_item_from_key(self, item_key: str) -> object: return self._borg.map.get_item_by_key(item_key) - def get_key(self, item: object) -> int: - return self._borg.map.convert_id_to_key(item) - class FitError(Exception): def __init__(self, e: Exception = None): diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 0b910e07..f559ad28 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -7,13 +7,7 @@ import sys import weakref -from collections import defaultdict from typing import List -from typing import Union -from uuid import UUID -from uuid import uuid4 -from weakref import WeakKeyDictionary - class _EntryList(list): def __init__(self, *args, my_type=None, **kwargs): diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 8d3bdfe1..07fdd9d1 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -75,8 +75,8 @@ def __init__( for key, item in kwargs.items(): _kwargs[key] = item for arg in args: - kwargs[str(borg.map.convert_id_to_key(arg))] = arg - _kwargs[str(borg.map.convert_id_to_key(arg))] = arg + kwargs[arg.name] = arg + _kwargs[arg.name] = arg # Set kwargs, also useful for serialization self._kwargs = NotarizedDict(**_kwargs) @@ -109,7 +109,7 @@ def insert(self, index: int, value: Union[V, B]) -> None: update_key = list(self._kwargs.keys()) values = list(self._kwargs.values()) # Update the internal dict - new_key = str(borg.map.convert_id_to_key(value)) + new_key = value.name update_key.insert(index, new_key) values.insert(index, value) self._kwargs.reorder(**{k: v for k, v in zip(update_key, values)}) diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index c938c5d8..8a62fc97 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -28,7 +28,7 @@ def raise_(ex): def _remover(a_obj_id: str, v_obj_id: str): try: # Try to get parent object (might be deleted) - a_obj = borg.map.get_item_by_key(int(a_obj_id)) + a_obj = borg.map.get_item_by_key(a_obj_id) except ValueError: return if a_obj._constraints["virtual"].get(v_obj_id, False): @@ -93,7 +93,7 @@ def component_realizer(obj: BV, component: str, recursive: bool = True): value = component._kwargs[key] else: value = key - key = value._borg.map.convert_id_to_key(value) + key = value.name if ( getattr(value, "__old_class__", value.__class__) in ec_var.__dict__.values() @@ -131,15 +131,15 @@ def virtualizer(obj: BV) -> BV: constraint = ObjConstraint(new_obj, "", old_obj) constraint.external = True 
old_obj._constraints["virtual"][ - str(obj._borg.map.convert_id(new_obj).int) + obj.name ] = constraint new_obj._constraints["builtin"] = dict() # setattr(new_obj, "__previous_set", getattr(olobj, "__previous_set", None)) weakref.finalize( new_obj, _remover, - str(borg.map.convert_id(old_obj).int), - str(borg.map.convert_id(new_obj).int), + old_obj.name, + new_obj.name, ) return new_obj @@ -148,7 +148,7 @@ def virtualizer(obj: BV) -> BV: virtual_options = { "_is_virtual": True, "is_virtual": property(fget=lambda self: self._is_virtual), - "_derived_from": property(fget=lambda self: self._borg.map.convert_id(obj).int), + "_derived_from": property(fget=obj.name), "__non_virtual_class__": klass, "realize": realizer, "relalize_component": component_realizer, @@ -177,14 +177,14 @@ def virtualizer(obj: BV) -> BV: v_p._enabled = False constraint = ObjConstraint(v_p, "", obj) constraint.external = True - obj._constraints["virtual"][str(cls._borg.map.convert_id(v_p).int)] = constraint + obj._constraints["virtual"][v_p.name] = constraint v_p._constraints["builtin"] = dict() setattr(v_p, "__previous_set", getattr(obj, "__previous_set", None)) weakref.finalize( v_p, _remover, - str(borg.map.convert_id(obj).int), - str(borg.map.convert_id(v_p).int), + obj.name, + v_p.name, ) else: # In this case, we need to be recursive. diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index ea319e91..739f820b 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -98,51 +98,51 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: returns = [returns] if log_type == "get": for var in returns: - if borg.map.convert_id_to_key(var) in borg.map.returned_objs: + if var.name in borg.map.returned_objs: index = borg.map.returned_objs.index( - borg.map.convert_id_to_key(var) + var.name ) temp += f"{Store().var_ident}{index}, " if len(returns) > 0: temp = temp[:-2] temp += " = " - if borg.map.convert_id_to_key(self._my_self) in borg.map.created_objs: + if self._my_self.name in borg.map.created_objs: # for edge in route[::-1]: index = borg.map.created_objs.index( - borg.map.convert_id_to_key(self._my_self) + self._my_self.name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) - if borg.map.convert_id(self._my_self) in borg.map.created_internal: + if self._my_self.name in borg.map.created_internal: # We now have to trace.... 
route = borg.map.reverse_route(self._my_self) # noqa: F841 index = borg.map.created_objs.index( - borg.map.convert_id_to_key(self._my_self) + self._my_self.name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) elif log_type == "set": - if borg.map.convert_id_to_key(self._my_self) in borg.map.created_objs: + if self._my_self.name in borg.map.created_objs: index = borg.map.created_objs.index( - borg.map.convert_id_to_key(self._my_self) + self._my_self.name ) temp += f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id} = " args = args[1:] for var in args: - if borg.map.convert_id_to_key(var) in borg.map.argument_objs: + if var.name in borg.map.argument_objs: index = borg.map.argument_objs.index( - borg.map.convert_id_to_key(var) + var.name ) temp += f"{Store().var_ident}{index}" - elif borg.map.convert_id_to_key(var) in borg.map.returned_objs: + elif var.name in borg.map.returned_objs: index = borg.map.returned_objs.index( - borg.map.convert_id_to_key(var) + var.name ) temp += f"{Store().var_ident}{index}" - elif borg.map.convert_id_to_key(var) in borg.map.created_objs: - index = borg.map.created_objs.index(borg.map.convert_id_to_key(var)) + elif var.name in borg.map.created_objs: + index = borg.map.created_objs.index(var.name) temp += f"{self._my_self.__class__.__name__.lower()}_{index}" else: if isinstance(var, str): diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 4a203718..62c98704 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -61,9 +61,9 @@ def generatePath(model_obj: B, skip_first: bool = False) -> Tuple[List[int], Lis start_idx = 0 + int(skip_first) ids = [] names = [] - model_id = borg.map.convert_id(model_obj) + model_id = model_obj.name for par in pars: - elem = borg.map.convert_id(par) + elem = par.name route = borg.map.reverse_route(elem, model_id) objs = [getattr(borg.map.get_item_by_key(r), "name") for r in route] objs.reverse() diff --git a/src/easyscience/Utils/io/template.py b/src/easyscience/Utils/io/template.py index 07ebb518..f57c3fd2 100644 --- a/src/easyscience/Utils/io/template.py +++ b/src/easyscience/Utils/io/template.py @@ -223,7 +223,7 @@ def runner(o): if hasattr(obj, '_convert_to_dict'): d = obj._convert_to_dict(d, self, skip=skip, **kwargs) if hasattr(obj, '_borg') and '@id' not in d: - d['@id'] = str(obj._borg.map.convert_id(obj).int) + d['@id'] = obj.name return d @staticmethod diff --git a/tests/integration_tests/test_undoRedo.py b/tests/integration_tests/test_undoRedo.py index 3c55bc42..8d7f7b95 100644 --- a/tests/integration_tests/test_undoRedo.py +++ b/tests/integration_tests/test_undoRedo.py @@ -288,8 +288,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: assert borg.stack.redoText() == "Fitting routine" borg.stack.redo() - assert l2.m.raw_value == res.p[f"p{borg.map.convert_id_to_key(l2.m)}"] - assert l2.c.raw_value == res.p[f"p{borg.map.convert_id_to_key(l2.c)}"] + assert l2.m.raw_value == res.p[f"p{l2.m.name}"] + assert l2.c.raw_value == res.p[f"p{l2.c.name}"] # @pytest.mark.parametrize('math_funcs', [pytest.param([Parameter.__iadd__, float.__add__], id='Addition'), diff --git a/tests/unit_tests/Fitting/test_fitting.py b/tests/unit_tests/Fitting/test_fitting.py index fba4fd06..5f948613 100644 --- a/tests/unit_tests/Fitting/test_fitting.py +++ b/tests/unit_tests/Fitting/test_fitting.py @@ -149,11 +149,11 @@ def test_fit_result(genObjs, fit_engine): sp_sin.phase.fixed = False sp_ref1 = { - 
f"p{sp_sin._borg.map.convert_id_to_key(item1)}": item1.raw_value + f"p{item1.name}": item1.raw_value for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) } sp_ref2 = { - f"p{sp_sin._borg.map.convert_id_to_key(item1)}": item2.raw_value + f"p{item1.name}": item2.raw_value for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) } diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index e0b9757c..eab8d543 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -486,8 +486,8 @@ def test_baseCollection_set_index(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert obj._borg.map.convert_id_to_key(item) in edges - assert obj._borg.map.convert_id_to_key(p2) not in edges + assert item.name in edges + assert p2.name not in edges @pytest.mark.parametrize("cls", class_constructors) @@ -510,8 +510,8 @@ def test_baseCollection_set_index_based(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert obj._borg.map.convert_id_to_key(item) in edges - assert obj._borg.map.convert_id_to_key(p4) not in edges + assert item.name in edges + assert p4.name not in edges @pytest.mark.parametrize("cls", class_constructors) diff --git a/tests/unit_tests/Objects/test_Virtual.py b/tests/unit_tests/Objects/test_Virtual.py index 16fb9fc0..46f71b0e 100644 --- a/tests/unit_tests/Objects/test_Virtual.py +++ b/tests/unit_tests/Objects/test_Virtual.py @@ -52,7 +52,7 @@ def test_virtual_variable_modify(cls): obj.value = new_value assert obj.raw_value == v_obj.raw_value - id_vobj = str(cls._borg.map.convert_id(v_obj).int) + id_vobj = v_obj.name assert id_vobj in list(obj._constraints["virtual"].keys()) del v_obj From 7779d582bcae136d7af1f51dddbd8493120bf349 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 14:12:23 +0200 Subject: [PATCH 03/57] NameConverter class removed, move name assignment --- src/easyscience/Fitting/DFO_LS.py | 5 ++--- src/easyscience/Fitting/bumps.py | 7 +++---- src/easyscience/Fitting/fitting_template.py | 12 ------------ src/easyscience/Fitting/lmfit.py | 5 ++--- src/easyscience/Objects/Graph.py | 1 + src/easyscience/Objects/ObjectClasses.py | 2 +- src/easyscience/Objects/Variable.py | 2 +- 7 files changed, 10 insertions(+), 24 deletions(-) diff --git a/src/easyscience/Fitting/DFO_LS.py b/src/easyscience/Fitting/DFO_LS.py index 53ab0ff9..8985f77a 100644 --- a/src/easyscience/Fitting/DFO_LS.py +++ b/src/easyscience/Fitting/DFO_LS.py @@ -16,7 +16,6 @@ from easyscience.Fitting.fitting_template import FitError from easyscience.Fitting.fitting_template import FitResults from easyscience.Fitting.fitting_template import FittingTemplate -from easyscience.Fitting.fitting_template import NameConverter from easyscience.Fitting.fitting_template import np @@ -60,7 +59,7 @@ def make_func(x, y, weights): par["p" + str(name)] = item.raw_value else: for item in pars: - par["p" + str(NameConverter().get_key(item))] = item.raw_value + par["p" + item.name] = item.raw_value def residuals(x0) -> np.ndarray: for idx, par_name in enumerate(par.keys()): @@ -89,7 +88,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = NameConverter().get_key(parameter) + key = parameter.name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git 
a/src/easyscience/Fitting/bumps.py b/src/easyscience/Fitting/bumps.py index b7b19a78..ebe16609 100644 --- a/src/easyscience/Fitting/bumps.py +++ b/src/easyscience/Fitting/bumps.py @@ -19,7 +19,6 @@ from easyscience.Fitting.fitting_template import FitError from easyscience.Fitting.fitting_template import FitResults from easyscience.Fitting.fitting_template import FittingTemplate -from easyscience.Fitting.fitting_template import NameConverter from easyscience.Fitting.fitting_template import np @@ -66,7 +65,7 @@ def make_func(x, y, weights): else: for item in pars: par[ - "p" + str(NameConverter().get_key(item)) + "p" + item.name ] = obj.convert_to_par_object(item) return Curve(fit_func, x, y, dy=weights, **par) @@ -87,7 +86,7 @@ def _generate_fit_function(self) -> Callable: # Get a list of `Parameters` self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = NameConverter().get_key(parameter) + key = parameter.name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) @@ -245,7 +244,7 @@ def convert_to_par_object(obj) -> bumpsParameter: :rtype: bumpsParameter """ return bumpsParameter( - name="p" + str(NameConverter().get_key(obj)), + name="p" + obj.name, value=obj.raw_value, bounds=[obj.min, obj.max], fixed=obj.fixed, diff --git a/src/easyscience/Fitting/fitting_template.py b/src/easyscience/Fitting/fitting_template.py index 947f485e..92a35119 100644 --- a/src/easyscience/Fitting/fitting_template.py +++ b/src/easyscience/Fitting/fitting_template.py @@ -245,18 +245,6 @@ def reduced_chi(self): return self.chi2 / (len(self.x) - self.n_pars) -class NameConverter: - def __init__(self): - from easyscience import borg - - self._borg = borg - - def get_name_from_key(self, item_key: str) -> str: - return getattr(self._borg.map.get_item_by_key(item_key), 'name', '') - - def get_item_from_key(self, item_key: str) -> object: - return self._borg.map.get_item_by_key(item_key) - class FitError(Exception): def __init__(self, e: Exception = None): diff --git a/src/easyscience/Fitting/lmfit.py b/src/easyscience/Fitting/lmfit.py index c77e6036..66f9873d 100644 --- a/src/easyscience/Fitting/lmfit.py +++ b/src/easyscience/Fitting/lmfit.py @@ -20,7 +20,6 @@ from easyscience.Fitting.fitting_template import FitError from easyscience.Fitting.fitting_template import FitResults from easyscience.Fitting.fitting_template import FittingTemplate -from easyscience.Fitting.fitting_template import NameConverter from easyscience.Fitting.fitting_template import np @@ -91,7 +90,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = NameConverter().get_key(parameter) + key = parameter.name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) @@ -241,7 +240,7 @@ def convert_to_par_object(obj) -> lmParameter: :rtype: lmParameter """ return lmParameter( - "p" + str(NameConverter().get_key(obj)), + "p" + obj.name, value=obj.raw_value, vary=not obj.fixed, min=obj.min, diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index f559ad28..cfe1647f 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -130,6 +130,7 @@ def change_type(self, obj, new_type: str): def add_vertex(self, obj: object, obj_type: str = None): name = obj.name + self._store[name] = obj self.__graph_dict[name] = _EntryList() # Add objects type to the list of types 
self.__graph_dict[name].finalizer = weakref.finalize( diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index f4b60cf4..80722f9f 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -36,11 +36,11 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} def __init__(self, name: str, interface: Optional[iF] = None): + self._name: str = name self._borg = borg self._borg.map.add_vertex(self, obj_type="created") self.interface = interface self.user_data: dict = {} - self._name: str = name @property def _arg_spec(self) -> Set[str]: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index b9816abb..5c1a5a7f 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -108,13 +108,13 @@ def __init__( if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} + self.name: str = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') # Make the connection between self and parent if parent is not None: self._borg.map.add_edge(parent, self) - self.name: str = name # Attach units if necessary if isinstance(units, ureg.Unit): self._units = ureg.Quantity(1, units=deepcopy(units)) From 0d59e9af7252a5a8abcd5fcd46a71e95594983ab Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 14:17:20 +0200 Subject: [PATCH 04/57] remove get_key --- tests/unit_tests/Objects/test_BaseObj.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 0fbca274..9a600355 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -428,20 +428,17 @@ def from_pars(cls, a: float): a = A.from_pars(a_start) graph = a._borg.map - def get_key(obj): - return graph.convert_id_to_key(obj) - assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 a_ = Parameter("a", a_end) - assert get_key(a.a) in graph.get_edges(a) + assert a.a.name in graph.get_edges(a) a__ = a.a setattr(a, "a", a_) assert a.a.raw_value == a_end assert len(graph.get_edges(a)) == 1 - assert get_key(a_) in graph.get_edges(a) - assert get_key(a__) not in graph.get_edges(a) + assert a_.name in graph.get_edges(a) + assert a__.name not in graph.get_edges(a) def test_BaseCreation(): From 74dcb289791316da2437d4169730e83c3dd7f3d1 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 14:35:23 +0200 Subject: [PATCH 05/57] Add check if object name is already taken --- src/easyscience/Objects/Graph.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index cfe1647f..273e7ac0 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -130,7 +130,8 @@ def change_type(self, obj, new_type: str): def add_vertex(self, obj: object, obj_type: str = None): name = obj.name - + if name in self._store.keys(): + raise ValueError(f"Object name {name} already exists in the graph.") self._store[name] = obj self.__graph_dict[name] = _EntryList() # Add objects type to the list of types self.__graph_dict[name].finalizer = weakref.finalize( From 3bedcee01ae900a4af583993d226138e643e3fd4 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 15:51:41 +0200 Subject: [PATCH 06/57] add default name generation to BasedBase and Descriptor --- 
src/easyscience/Objects/ObjectClasses.py | 17 +++++++++++++++-- src/easyscience/Objects/Variable.py | 17 ++++++++++++++--- tests/unit_tests/Objects/test_Groups.py | 2 +- 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 80722f9f..c28825d8 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -16,6 +16,7 @@ from typing import Optional from typing import Set from typing import TypeVar +from typing import Union from easyscience import borg from easyscience.Utils.classTools import addLoggedProp @@ -35,8 +36,10 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} - def __init__(self, name: str, interface: Optional[iF] = None): - self._name: str = name + def __init__(self, name: Union(str, None) = None, interface: Optional[iF] = None): + if name is None: + name = self._generate_default_name() + self._name = name self._borg = borg self._borg.map.add_vertex(self, obj_type="created") self.interface = interface @@ -193,6 +196,16 @@ def __dir__(self) -> Iterable[str]: """ new_class_objs = list(k for k in dir(self.__class__) if not k.startswith("_")) return sorted(new_class_objs) + + def _generate_default_name(self) -> str: + """ + Generate a default name for the object. + """ + class_name = self.__class__.__name__ + iterator = 0 + while class_name+"_"+str(iterator) in self._borg.map.vertices(): + iterator += 1 + return class_name+"_"+str(iterator) if TYPE_CHECKING: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 5c1a5a7f..3ccf0052 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -66,8 +66,8 @@ class Descriptor(ComponentSerializer): def __init__( self, - name: str, value: Any, + name: Union(str, None) = None, units: Optional[Union[str, ureg.Unit]] = None, description: Optional[str] = None, url: Optional[str] = None, @@ -107,8 +107,9 @@ def __init__( """ if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} - - self.name: str = name + if name is None: + name = self._generate_default_name() + self._name = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') # Make the connection between self and parent @@ -384,6 +385,16 @@ def to_obj_type(self, data_type: Type[Parameter], *kwargs): def __copy__(self): return self.__class__.from_dict(self.as_dict()) + def _generate_default_name(self) -> str: + """ + Generate a default name for the object. 
+ """ + class_name = self.__class__.__name__ + iterator = 0 + while class_name+"_"+str(iterator) in self._borg.map.vertices(): + iterator += 1 + return class_name+"_"+str(iterator) + V = TypeVar('V', bound=Descriptor) diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index eab8d543..18678ae2 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -494,7 +494,7 @@ def test_baseCollection_set_index(cls): def test_baseCollection_set_index_based(cls): name = "test" p1 = Parameter("p1", 1) - p2 = Parameter("p1", 2) + p2 = Parameter("p2", 2) p3 = Parameter("p3", 3) p4 = Parameter("p4", 4) p5 = Parameter("p5", 5) From 048fdb740e6c34914adcf22db901603d36fdd55d Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 09:48:37 +0200 Subject: [PATCH 07/57] borg _clear method --- src/easyscience/Fitting/Constraints.py | 4 ++-- src/easyscience/Objects/Graph.py | 7 ++++++- src/easyscience/Objects/ObjectClasses.py | 6 +++--- src/easyscience/Objects/Variable.py | 8 ++++---- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/src/easyscience/Fitting/Constraints.py b/src/easyscience/Fitting/Constraints.py index 96a4778b..bc36c7f2 100644 --- a/src/easyscience/Fitting/Constraints.py +++ b/src/easyscience/Fitting/Constraints.py @@ -111,11 +111,11 @@ def __call__(self, *args, no_set: bool = False, **kwargs): return None return independent_objs = None - if isinstance(self.dependent_obj_ids, int): + if isinstance(self.dependent_obj_ids, str): dependent_obj = self.get_obj(self.dependent_obj_ids) else: raise AttributeError - if isinstance(self.independent_obj_ids, int): + if isinstance(self.independent_obj_ids, str): independent_objs = self.get_obj(self.independent_obj_ids) elif isinstance(self.independent_obj_ids, list): independent_objs = [self.get_obj(obj_id) for obj_id in self.independent_obj_ids] diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 273e7ac0..742f6782 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -143,7 +143,7 @@ def add_edge(self, start_obj: object, end_obj: object): if start_obj.name in self.__graph_dict.keys(): self.__graph_dict[start_obj.name].append(end_obj.name) else: - raise AttributeError + raise AttributeError("Start object not in graph.") def get_edges(self, start_obj) -> List[str]: if start_obj.name in self.__graph_dict.keys(): @@ -179,6 +179,7 @@ def prune_vertex_from_edge(self, parent_obj, child_obj): def prune(self, key: str): if key in self.__graph_dict.keys(): del self.__graph_dict[key] + del self._store[key] def find_isolated_vertices(self) -> list: """returns a list of isolated vertices.""" @@ -283,6 +284,10 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return True return False + def _clear(self): + self._store = weakref.WeakValueDictionary() + self.__graph_dict = {} + def __repr__(self) -> str: return f"Graph object of {len(self._store)} vertices." 
diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index c28825d8..4956ab13 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -36,11 +36,11 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} - def __init__(self, name: Union(str, None) = None, interface: Optional[iF] = None): + def __init__(self, name: str, interface: Optional[iF] = None): + self._borg = borg if name is None: name = self._generate_default_name() self._name = name - self._borg = borg self._borg.map.add_vertex(self, obj_type="created") self.interface = interface self.user_data: dict = {} @@ -244,7 +244,7 @@ def __init__( self._kwargs = kwargs for key in kwargs.keys(): if key in known_keys: - raise AttributeError + raise AttributeError("Kwargs cannot overwrite class attributes in BaseObj.") if issubclass( type(kwargs[key]), (BasedBase, Descriptor) ) or "BaseCollection" in [c.__name__ for c in type(kwargs[key]).__bases__]: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 3ccf0052..7bfcbf83 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -109,7 +109,7 @@ def __init__( self._args = {'value': None, 'units': ''} if name is None: name = self._generate_default_name() - self._name = name + self.name = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') # Make the connection between self and parent @@ -122,7 +122,7 @@ def __init__( elif isinstance(units, (str, type(None))): self._units = ureg.parse_expression(units) else: - raise AttributeError + raise AttributeError('Units must be a string or a pint unit object') # Clunky method of keeping self.value up to date self._type = type(value) self.__isBooleanValue = isinstance(value, bool) @@ -486,8 +486,8 @@ class Parameter(Descriptor): def __init__( self, - name: str, value: Union[numbers.Number, np.ndarray], + name: str, error: Optional[Union[numbers.Number, np.ndarray]] = 0.0, min: Optional[numbers.Number] = -np.Inf, max: Optional[numbers.Number] = np.Inf, @@ -530,7 +530,7 @@ def __init__( if error < 0: raise ValueError('Standard deviation `error` must be positive') - super().__init__(name, value, **kwargs) + super().__init__(name=name, value=value, **kwargs) self._args['units'] = str(self.unit) # Warnings if we are given a boolean From 62b06a6363295cb451870e477f823252324b06ec Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 10:41:33 +0200 Subject: [PATCH 08/57] change id from name to unique_name --- src/easyscience/Objects/Graph.py | 38 ++++++++++++------------ src/easyscience/Objects/ObjectClasses.py | 17 +++++++++-- src/easyscience/Objects/Variable.py | 26 ++++++++++++++-- 3 files changed, 56 insertions(+), 25 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 742f6782..50dbe8ff 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -113,23 +113,23 @@ def get_item_by_key(self, item_id: str) -> object: raise ValueError def is_known(self, vertex: object) -> bool: - # All objects should have a 'name' attribute - return vertex.name in self._store.keys() + # All objects should have a 'unique_name' attribute + return vertex.unique_name in self._store.keys() def find_type(self, vertex: object) -> List[str]: if self.is_known(vertex): - return self.__graph_dict[vertex.name].type + return self.__graph_dict[vertex.unique_name].type def 
reset_type(self, obj, default_type: str): - if obj.name in self.__graph_dict.keys(): - self.__graph_dict[obj.name].reset_type(default_type) + if obj.unique_name in self.__graph_dict.keys(): + self.__graph_dict[obj.unique_name].reset_type(default_type) def change_type(self, obj, new_type: str): - if obj.name in self.__graph_dict.keys(): - self.__graph_dict[obj.name].type = new_type + if obj.unique_name in self.__graph_dict.keys(): + self.__graph_dict[obj.unique_name].type = new_type def add_vertex(self, obj: object, obj_type: str = None): - name = obj.name + name = obj.unique_name if name in self._store.keys(): raise ValueError(f"Object name {name} already exists in the graph.") self._store[name] = obj @@ -140,14 +140,14 @@ def add_vertex(self, obj: object, obj_type: str = None): self.__graph_dict[name].type = obj_type def add_edge(self, start_obj: object, end_obj: object): - if start_obj.name in self.__graph_dict.keys(): - self.__graph_dict[start_obj.name].append(end_obj.name) + if start_obj.unique_name in self.__graph_dict.keys(): + self.__graph_dict[start_obj.unique_name].append(end_obj.unique_name) else: raise AttributeError("Start object not in graph.") def get_edges(self, start_obj) -> List[str]: - if start_obj.name in self.__graph_dict.keys(): - return list(self.__graph_dict[start_obj.name]) + if start_obj.unique_name in self.__graph_dict.keys(): + return list(self.__graph_dict[start_obj.unique_name]) else: raise AttributeError @@ -165,10 +165,10 @@ def __generate_edges(self) -> list: return edges def prune_vertex_from_edge(self, parent_obj, child_obj): - vertex1 = parent_obj.name + vertex1 = parent_obj.unique_name if child_obj is None: return - vertex2 = child_obj.name + vertex2 = child_obj.unique_name if ( vertex1 in self.__graph_dict.keys() @@ -196,8 +196,8 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: in graph""" try: - start_vertex = start_obj.name - end_vertex = end_obj.name + start_vertex = start_obj.unique_name + end_vertex = end_obj.unique_name except TypeError: start_vertex = start_obj end_vertex = end_obj @@ -219,8 +219,8 @@ def find_all_paths(self, start_obj, end_obj, path=[]) -> list: """find all paths from start_vertex to end_vertex in graph""" - start_vertex = start_obj.name - end_vertex = end_obj.name + start_vertex = start_obj.unique_name + end_vertex = end_obj.unique_name graph = self.__graph_dict path = path + [start_vertex] @@ -247,7 +247,7 @@ def reverse_route(self, end_obj, start_obj=None) -> List: :return: :rtype: """ - end_vertex = end_obj.name + end_vertex = end_obj.unique_name path_length = sys.maxsize optimum_path = [] diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 4956ab13..ede64c3f 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -36,10 +36,11 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} - def __init__(self, name: str, interface: Optional[iF] = None): + def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): self._borg = borg - if name is None: - name = self._generate_default_name() + if unique_name is None: + unique_name = self._generate_default_name() + self._unique_name = unique_name self._name = name self._borg.map.add_vertex(self, obj_type="created") self.interface = interface @@ -64,6 +65,16 @@ def __reduce__(self): cls = getattr(self, "__old_class__", self.__class__) return cls.from_dict, (state,) + @property + def unique_name(self) -> str: + """ Get the unique name of the 
object.""" + return self._unique_name + + @unique_name.setter + def unique_name(self, new_unique_name: str): + """ Set a new unique name for the object. The old name is still kept in the map. """ + self._unique_name = new_unique_name + @property def name(self) -> str: """ diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 7bfcbf83..d55993eb 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -67,8 +67,9 @@ class Descriptor(ComponentSerializer): def __init__( self, value: Any, - name: Union(str, None) = None, + name: str, units: Optional[Union[str, ureg.Unit]] = None, + unique_name: Optional[str] = None, description: Optional[str] = None, url: Optional[str] = None, display_name: Optional[str] = None, @@ -107,8 +108,9 @@ def __init__( """ if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} - if name is None: - name = self._generate_default_name() + if unique_name is None: + unique_name = self._generate_default_name() + self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') @@ -180,6 +182,24 @@ def __reduce__(self): cls = self.__old_class__ return cls.from_dict, (state,) + @property + def unique_name(self) -> str: + """ + Get the unique name of this object. + + :return: Unique name of this object + """ + return self._unique_name + + @unique_name.setter + def unique_name(self, name: str): + """ + Set the unique name of this object. + + :param name: Unique name of this object + """ + self._unique_name = name + @property def display_name(self) -> str: """ From b57a136e5456bca35ad6d271225da1477dff1359 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 10:46:26 +0200 Subject: [PATCH 09/57] more .name changes --- src/easyscience/Fitting/DFO_LS.py | 4 ++-- src/easyscience/Fitting/bumps.py | 6 +++--- src/easyscience/Fitting/lmfit.py | 4 ++-- tests/unit_tests/Objects/test_BaseObj.py | 6 +++--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/easyscience/Fitting/DFO_LS.py b/src/easyscience/Fitting/DFO_LS.py index 8985f77a..81e7560f 100644 --- a/src/easyscience/Fitting/DFO_LS.py +++ b/src/easyscience/Fitting/DFO_LS.py @@ -59,7 +59,7 @@ def make_func(x, y, weights): par["p" + str(name)] = item.raw_value else: for item in pars: - par["p" + item.name] = item.raw_value + par["p" + item.unique_name] = item.raw_value def residuals(x0) -> np.ndarray: for idx, par_name in enumerate(par.keys()): @@ -88,7 +88,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = parameter.name + key = parameter.unique_name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git a/src/easyscience/Fitting/bumps.py b/src/easyscience/Fitting/bumps.py index ebe16609..8f317a1a 100644 --- a/src/easyscience/Fitting/bumps.py +++ b/src/easyscience/Fitting/bumps.py @@ -65,7 +65,7 @@ def make_func(x, y, weights): else: for item in pars: par[ - "p" + item.name + "p" + item.unique_name ] = obj.convert_to_par_object(item) return Curve(fit_func, x, y, dy=weights, **par) @@ -86,7 +86,7 @@ def _generate_fit_function(self) -> Callable: # Get a list of `Parameters` self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = parameter.name + key = parameter.unique_name self._cached_pars[key] = parameter 
self._cached_pars_vals[key] = (parameter.value, parameter.error) @@ -244,7 +244,7 @@ def convert_to_par_object(obj) -> bumpsParameter: :rtype: bumpsParameter """ return bumpsParameter( - name="p" + obj.name, + name="p" + obj.unique_name, value=obj.raw_value, bounds=[obj.min, obj.max], fixed=obj.fixed, diff --git a/src/easyscience/Fitting/lmfit.py b/src/easyscience/Fitting/lmfit.py index 66f9873d..a170a567 100644 --- a/src/easyscience/Fitting/lmfit.py +++ b/src/easyscience/Fitting/lmfit.py @@ -90,7 +90,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = parameter.name + key = parameter.unique_name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) @@ -240,7 +240,7 @@ def convert_to_par_object(obj) -> lmParameter: :rtype: lmParameter """ return lmParameter( - "p" + obj.name, + "p" + obj.unique_name, value=obj.raw_value, vary=not obj.fixed, min=obj.min, diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 9a600355..78dda2fc 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -431,14 +431,14 @@ def from_pars(cls, a: float): assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 a_ = Parameter("a", a_end) - assert a.a.name in graph.get_edges(a) + assert a.a.unique_name in graph.get_edges(a) a__ = a.a setattr(a, "a", a_) assert a.a.raw_value == a_end assert len(graph.get_edges(a)) == 1 - assert a_.name in graph.get_edges(a) - assert a__.name not in graph.get_edges(a) + assert a_.unique_name in graph.get_edges(a) + assert a__.unique_name not in graph.get_edges(a) def test_BaseCreation(): From ece545ec160626794f9c2b2f2528d641e5aa93e3 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 10:51:49 +0200 Subject: [PATCH 10/57] reorder name and value --- src/easyscience/Objects/Variable.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index d55993eb..202c79e1 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -66,8 +66,8 @@ class Descriptor(ComponentSerializer): def __init__( self, - value: Any, name: str, + value: Any, units: Optional[Union[str, ureg.Unit]] = None, unique_name: Optional[str] = None, description: Optional[str] = None, @@ -506,8 +506,8 @@ class Parameter(Descriptor): def __init__( self, - value: Union[numbers.Number, np.ndarray], name: str, + value: Union[numbers.Number, np.ndarray], error: Optional[Union[numbers.Number, np.ndarray]] = 0.0, min: Optional[numbers.Number] = -np.Inf, max: Optional[numbers.Number] = np.Inf, @@ -541,7 +541,7 @@ def __init__( # Set the error self._args = {'value': value, 'units': '', 'error': error} - if not isinstance(value, numbers.Number): + if not isinstance(value, numbers.Number) or isinstance(value, np.ndarray): raise ValueError('In a parameter the `value` must be numeric') if value < min: raise ValueError('`value` can not be less than `min`') From 982b3ca382b8e9a5f173feba4b8e8da5162e6f5d Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 11:50:52 +0200 Subject: [PATCH 11/57] more .name replacements and test fixes --- src/easyscience/Fitting/Constraints.py | 6 ++--- src/easyscience/Objects/Groups.py | 6 ++--- src/easyscience/Objects/virtual.py | 18 +++++++------ src/easyscience/Utils/Hugger/Property.py | 28 
++++++++++---------- src/easyscience/Utils/classTools.py | 4 +-- src/easyscience/Utils/io/template.py | 2 +- tests/integration_tests/test_undoRedo.py | 4 +-- tests/unit_tests/Fitting/test_fitting.py | 4 +-- tests/unit_tests/Objects/test_Groups.py | 8 +++--- tests/unit_tests/Objects/test_Virtual.py | 2 +- tests/unit_tests/utils/io_tests/test_dict.py | 7 ++++- tests/unit_tests/utils/io_tests/test_json.py | 3 +++ tests/unit_tests/utils/io_tests/test_xml.py | 3 ++- 13 files changed, 53 insertions(+), 42 deletions(-) diff --git a/src/easyscience/Fitting/Constraints.py b/src/easyscience/Fitting/Constraints.py index bc36c7f2..46a29c43 100644 --- a/src/easyscience/Fitting/Constraints.py +++ b/src/easyscience/Fitting/Constraints.py @@ -43,18 +43,18 @@ def __init__( value: Optional[Number] = None, ): self.aeval = Interpreter() - self.dependent_obj_ids = dependent_obj.name + self.dependent_obj_ids = dependent_obj.unique_name self.independent_obj_ids = None self._enabled = True self.external = False self._finalizer = None if independent_obj is not None: if isinstance(independent_obj, list): - self.independent_obj_ids = [obj.name for obj in independent_obj] + self.independent_obj_ids = [obj.unique_name for obj in independent_obj] if self.dependent_obj_ids in self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') else: - self.independent_obj_ids = independent_obj.name + self.independent_obj_ids = independent_obj.unique_name if self.dependent_obj_ids == self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') # Test if dependent is a parameter or a descriptor. diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 07fdd9d1..27ae1144 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -75,8 +75,8 @@ def __init__( for key, item in kwargs.items(): _kwargs[key] = item for arg in args: - kwargs[arg.name] = arg - _kwargs[arg.name] = arg + kwargs[arg.unique_name] = arg + _kwargs[arg.unique_name] = arg # Set kwargs, also useful for serialization self._kwargs = NotarizedDict(**_kwargs) @@ -109,7 +109,7 @@ def insert(self, index: int, value: Union[V, B]) -> None: update_key = list(self._kwargs.keys()) values = list(self._kwargs.values()) # Update the internal dict - new_key = value.name + new_key = value.unique_name update_key.insert(index, new_key) values.insert(index, value) self._kwargs.reorder(**{k: v for k, v in zip(update_key, values)}) diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index 8a62fc97..5049a497 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -48,6 +48,7 @@ def realizer(obj: BV): args = [] if klass in ec_var.__dict__.values(): # is_variable check kwargs = obj.encode_data() + kwargs["unique_name"] = None return klass(**kwargs) else: kwargs = {name: realizer(item) for name, item in obj._kwargs.items()} @@ -93,7 +94,7 @@ def component_realizer(obj: BV, component: str, recursive: bool = True): value = component._kwargs[key] else: value = key - key = value.name + key = value.unique_name if ( getattr(value, "__old_class__", value.__class__) in ec_var.__dict__.values() @@ -131,15 +132,15 @@ def virtualizer(obj: BV) -> BV: constraint = ObjConstraint(new_obj, "", old_obj) constraint.external = True old_obj._constraints["virtual"][ - obj.name + obj.unique_name ] = constraint new_obj._constraints["builtin"] = dict() # setattr(new_obj, "__previous_set", 
getattr(olobj, "__previous_set", None)) weakref.finalize( new_obj, _remover, - old_obj.name, - new_obj.name, + old_obj.unique_name, + new_obj.unique_name, ) return new_obj @@ -148,7 +149,7 @@ def virtualizer(obj: BV) -> BV: virtual_options = { "_is_virtual": True, "is_virtual": property(fget=lambda self: self._is_virtual), - "_derived_from": property(fget=obj.name), + "_derived_from": property(fget=obj.unique_name), "__non_virtual_class__": klass, "realize": realizer, "relalize_component": component_realizer, @@ -173,18 +174,19 @@ def virtualizer(obj: BV) -> BV: d = obj.encode_data() if hasattr(d, "fixed"): d["fixed"] = True + d["unique_name"] = None v_p = cls(**d) v_p._enabled = False constraint = ObjConstraint(v_p, "", obj) constraint.external = True - obj._constraints["virtual"][v_p.name] = constraint + obj._constraints["virtual"][v_p.unique_name] = constraint v_p._constraints["builtin"] = dict() setattr(v_p, "__previous_set", getattr(obj, "__previous_set", None)) weakref.finalize( v_p, _remover, - obj.name, - v_p.name, + obj.unique_name, + v_p.unique_name, ) else: # In this case, we need to be recursive. diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index 739f820b..efd6f76a 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -98,51 +98,51 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: returns = [returns] if log_type == "get": for var in returns: - if var.name in borg.map.returned_objs: + if var.unique_name in borg.map.returned_objs: index = borg.map.returned_objs.index( - var.name + var.unique_name ) temp += f"{Store().var_ident}{index}, " if len(returns) > 0: temp = temp[:-2] temp += " = " - if self._my_self.name in borg.map.created_objs: + if self._my_self.unique_name in borg.map.created_objs: # for edge in route[::-1]: index = borg.map.created_objs.index( - self._my_self.name + self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) - if self._my_self.name in borg.map.created_internal: + if self._my_self.unique_name in borg.map.created_internal: # We now have to trace.... 
route = borg.map.reverse_route(self._my_self) # noqa: F841 index = borg.map.created_objs.index( - self._my_self.name + self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) elif log_type == "set": - if self._my_self.name in borg.map.created_objs: + if self._my_self.unique_name in borg.map.created_objs: index = borg.map.created_objs.index( - self._my_self.name + self._my_self.unique_name ) temp += f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id} = " args = args[1:] for var in args: - if var.name in borg.map.argument_objs: + if var.unique_name in borg.map.argument_objs: index = borg.map.argument_objs.index( - var.name + var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.name in borg.map.returned_objs: + elif var.unique_name in borg.map.returned_objs: index = borg.map.returned_objs.index( - var.name + var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.name in borg.map.created_objs: - index = borg.map.created_objs.index(var.name) + elif var.unique_name in borg.map.created_objs: + index = borg.map.created_objs.index(var.unique_name) temp += f"{self._my_self.__class__.__name__.lower()}_{index}" else: if isinstance(var, str): diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 62c98704..03412a8f 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -61,9 +61,9 @@ def generatePath(model_obj: B, skip_first: bool = False) -> Tuple[List[int], Lis start_idx = 0 + int(skip_first) ids = [] names = [] - model_id = model_obj.name + model_id = model_obj.unique_name for par in pars: - elem = par.name + elem = par.unique_name route = borg.map.reverse_route(elem, model_id) objs = [getattr(borg.map.get_item_by_key(r), "name") for r in route] objs.reverse() diff --git a/src/easyscience/Utils/io/template.py b/src/easyscience/Utils/io/template.py index f57c3fd2..9f37e905 100644 --- a/src/easyscience/Utils/io/template.py +++ b/src/easyscience/Utils/io/template.py @@ -223,7 +223,7 @@ def runner(o): if hasattr(obj, '_convert_to_dict'): d = obj._convert_to_dict(d, self, skip=skip, **kwargs) if hasattr(obj, '_borg') and '@id' not in d: - d['@id'] = obj.name + d['@id'] = obj.unique_name return d @staticmethod diff --git a/tests/integration_tests/test_undoRedo.py b/tests/integration_tests/test_undoRedo.py index 8d7f7b95..747bd08c 100644 --- a/tests/integration_tests/test_undoRedo.py +++ b/tests/integration_tests/test_undoRedo.py @@ -288,8 +288,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: assert borg.stack.redoText() == "Fitting routine" borg.stack.redo() - assert l2.m.raw_value == res.p[f"p{l2.m.name}"] - assert l2.c.raw_value == res.p[f"p{l2.c.name}"] + assert l2.m.raw_value == res.p[f"p{l2.m.unique_name}"] + assert l2.c.raw_value == res.p[f"p{l2.c.unique_name}"] # @pytest.mark.parametrize('math_funcs', [pytest.param([Parameter.__iadd__, float.__add__], id='Addition'), diff --git a/tests/unit_tests/Fitting/test_fitting.py b/tests/unit_tests/Fitting/test_fitting.py index 5f948613..450c3d1e 100644 --- a/tests/unit_tests/Fitting/test_fitting.py +++ b/tests/unit_tests/Fitting/test_fitting.py @@ -149,11 +149,11 @@ def test_fit_result(genObjs, fit_engine): sp_sin.phase.fixed = False sp_ref1 = { - f"p{item1.name}": item1.raw_value + f"p{item1.unique_name}": item1.raw_value for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) } sp_ref2 = { - f"p{item1.name}": item2.raw_value + f"p{item1.unique_name}": 
item2.raw_value for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) } diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index 18678ae2..d292efea 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -486,8 +486,8 @@ def test_baseCollection_set_index(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert item.name in edges - assert p2.name not in edges + assert item.unique_name in edges + assert p2.unique_name not in edges @pytest.mark.parametrize("cls", class_constructors) @@ -510,8 +510,8 @@ def test_baseCollection_set_index_based(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert item.name in edges - assert p4.name not in edges + assert item.unique_name in edges + assert p4.unique_name not in edges @pytest.mark.parametrize("cls", class_constructors) diff --git a/tests/unit_tests/Objects/test_Virtual.py b/tests/unit_tests/Objects/test_Virtual.py index 46f71b0e..7c610887 100644 --- a/tests/unit_tests/Objects/test_Virtual.py +++ b/tests/unit_tests/Objects/test_Virtual.py @@ -52,7 +52,7 @@ def test_virtual_variable_modify(cls): obj.value = new_value assert obj.raw_value == v_obj.raw_value - id_vobj = v_obj.name + id_vobj = v_obj.unique_name assert id_vobj in list(obj._constraints["virtual"].keys()) del v_obj diff --git a/tests/unit_tests/utils/io_tests/test_dict.py b/tests/unit_tests/utils/io_tests/test_dict.py index bca42a54..5f4b6b2a 100644 --- a/tests/unit_tests/utils/io_tests/test_dict.py +++ b/tests/unit_tests/utils/io_tests/test_dict.py @@ -18,6 +18,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict +from easyscience import borg def recursive_remove(d, remove_keys: list) -> dict: @@ -215,7 +216,7 @@ def __init__(self, a, b): except metadata.PackageNotFoundError: version = '0.0.0' - obj = B(Descriptor("a", 1.0), np.array([1.0, 2.0, 3.0])) + obj = B(Descriptor("a", 1.0, unique_name="a"), np.array([1.0, 2.0, 3.0])) full_enc = obj.encode(encoder=DictSerializer, full_encode=True) expected = { "@module": "tests.unit_tests.utils.io_tests.test_dict", @@ -237,6 +238,7 @@ def __init__(self, a, b): "name": "a", "enabled": True, "value": 1.0, + "unique_name": "a", "url": "", }, } @@ -247,6 +249,7 @@ def test_custom_class_full_decode_with_numpy(): obj = B(Descriptor("a", 1.0), np.array([1.0, 2.0, 3.0])) full_enc = obj.encode(encoder=DictSerializer, full_encode=True) + borg.map._clear() obj2 = B.decode(full_enc, decoder=DictSerializer) assert obj.name == obj2.name assert obj.a.raw_value == obj2.a.raw_value @@ -267,6 +270,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) + borg.map._clear() dec = dp_cls.decode(enc, decoder=DictSerializer) for k in data_dict.keys(): @@ -287,6 +291,7 @@ def test_variable_DictSerializer_from_dict(dp_kwargs: dict, dp_cls: Type[Descrip data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) + borg.map._clear() dec = dp_cls.from_dict(enc) for k in data_dict.keys(): diff --git a/tests/unit_tests/utils/io_tests/test_json.py b/tests/unit_tests/utils/io_tests/test_json.py index 92d8f3cf..e48787b7 100644 --- a/tests/unit_tests/utils/io_tests/test_json.py +++ b/tests/unit_tests/utils/io_tests/test_json.py @@ -15,6 +15,7 @@ from .test_core import check_dict from 
.test_core import dp_param_dict from .test_core import skip_dict +from easyscience import borg def recursive_remove(d, remove_keys: list) -> dict: @@ -178,6 +179,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonSerializer) + borg.map._clear() assert isinstance(enc, str) dec = obj.decode(enc, decoder=JsonSerializer) @@ -199,5 +201,6 @@ def test_variable_DataDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descri data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonDataSerializer) + borg.map._clear() with pytest.raises(NotImplementedError): dec = obj.decode(enc, decoder=JsonDataSerializer) diff --git a/tests/unit_tests/utils/io_tests/test_xml.py b/tests/unit_tests/utils/io_tests/test_xml.py index ef8ac422..b1d35040 100644 --- a/tests/unit_tests/utils/io_tests/test_xml.py +++ b/tests/unit_tests/utils/io_tests/test_xml.py @@ -14,7 +14,7 @@ from .test_core import Descriptor from .test_core import dp_param_dict from .test_core import skip_dict - +from easyscience import borg def recursive_remove(d, remove_keys: list) -> dict: """ @@ -115,6 +115,7 @@ def test_variable_XMLDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descrip assert isinstance(enc, str) data_xml = ET.XML(enc) assert data_xml.tag == "data" + borg.map._clear() dec = dp_cls.decode(enc, decoder=XMLSerializer) for k in data_dict.keys(): From 3d5a65b0d452c0917ea96f5da8f1ff9022145da4 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:37:36 +0200 Subject: [PATCH 12/57] Fix tests --- src/easyscience/Fitting/DFO_LS.py | 2 +- src/easyscience/Fitting/bumps.py | 6 +++--- src/easyscience/Fitting/lmfit.py | 2 +- src/easyscience/Objects/Graph.py | 14 ++++++++++++-- src/easyscience/Objects/ObjectClasses.py | 11 +---------- src/easyscience/Objects/Variable.py | 12 +----------- src/easyscience/Objects/virtual.py | 2 +- src/easyscience/Utils/Hugger/Property.py | 2 +- tests/unit_tests/Objects/test_BaseObj.py | 3 +++ tests/unit_tests/Objects/test_Groups.py | 14 +++++++++++--- 10 files changed, 35 insertions(+), 33 deletions(-) diff --git a/src/easyscience/Fitting/DFO_LS.py b/src/easyscience/Fitting/DFO_LS.py index 81e7560f..f9219a59 100644 --- a/src/easyscience/Fitting/DFO_LS.py +++ b/src/easyscience/Fitting/DFO_LS.py @@ -106,7 +106,7 @@ def fit_function(x: np.ndarray, **kwargs): # Update the `Parameter` values and the callback if needed # TODO THIS IS NOT THREAD SAFE :-( for name, value in kwargs.items(): - par_name = int(name[1:]) + par_name = name[1:] if par_name in self._cached_pars.keys(): # This will take into account constraints if self._cached_pars[par_name].raw_value != value: diff --git a/src/easyscience/Fitting/bumps.py b/src/easyscience/Fitting/bumps.py index 8f317a1a..dcdc89b7 100644 --- a/src/easyscience/Fitting/bumps.py +++ b/src/easyscience/Fitting/bumps.py @@ -103,7 +103,7 @@ def fit_function(x: np.ndarray, **kwargs): """ # Update the `Parameter` values and the callback if needed for name, value in kwargs.items(): - par_name = int(name[1:]) + par_name = name[1:] if par_name in self._cached_pars.keys(): if self._cached_pars[par_name].raw_value != value: self._cached_pars[par_name].value = value @@ -270,7 +270,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status: bool): borg.stack.beginMacro("Fitting routine") for index, name in enumerate(self._cached_model._pnames): - dict_name = int(name[1:]) + dict_name = name[1:] pars[dict_name].value = fit_result.x[index] 
pars[dict_name].error = fit_result.dx[index] if stack_status: @@ -293,7 +293,7 @@ def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: pars = self._cached_pars item = {} for index, name in enumerate(self._cached_model._pnames): - dict_name = int(name[1:]) + dict_name = name[1:] item[name] = pars[dict_name].raw_value results.p0 = self.p_0 results.p = item diff --git a/src/easyscience/Fitting/lmfit.py b/src/easyscience/Fitting/lmfit.py index a170a567..cbc12721 100644 --- a/src/easyscience/Fitting/lmfit.py +++ b/src/easyscience/Fitting/lmfit.py @@ -108,7 +108,7 @@ def fit_function(x: np.ndarray, **kwargs): # Update the `Parameter` values and the callback if needed # TODO THIS IS NOT THREAD SAFE :-( for name, value in kwargs.items(): - par_name = int(name[1:]) + par_name = name[1:] if par_name in self._cached_pars.keys(): # This will take into account constraints if self._cached_pars[par_name].raw_value != value: diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 50dbe8ff..de5dcd92 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -73,6 +73,8 @@ def __init__(self): self._store = weakref.WeakValueDictionary() # A dict with object names as keys and a list of their object types as values, with weak references self.__graph_dict = {} + # A dictionary of class names and their corresponding default name_generator iterators + self._name_iterator_dict = {} def vertices(self) -> List[str]: """returns the vertices of a graph""" @@ -106,11 +108,19 @@ def _nested_get(self, obj_type: str) -> List[str]: extracted_list.append(key) return extracted_list + def _get_name_iterator(self, class_name: str) -> int: + """Get the iterator for the name generator for a class""" + if class_name in self._name_iterator_dict.keys(): + self._name_iterator_dict[class_name] += 1 + return self._name_iterator_dict[class_name] - 1 + else: + self._name_iterator_dict[class_name] = 1 + return 0 def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): return self._store[item_id] - raise ValueError + raise ValueError("Item not in graph.") def is_known(self, vertex: object) -> bool: # All objects should have a 'unique_name' attribute @@ -198,7 +208,7 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: try: start_vertex = start_obj.unique_name end_vertex = end_obj.unique_name - except TypeError: + except AttributeError: start_vertex = start_obj end_vertex = end_obj diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index ede64c3f..ed8a5fd1 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -39,7 +39,7 @@ class BasedBase(ComponentSerializer): def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): self._borg = borg if unique_name is None: - unique_name = self._generate_default_name() + unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) self._unique_name = unique_name self._name = name self._borg.map.add_vertex(self, obj_type="created") @@ -208,15 +208,6 @@ def __dir__(self) -> Iterable[str]: new_class_objs = list(k for k in dir(self.__class__) if not k.startswith("_")) return sorted(new_class_objs) - def _generate_default_name(self) -> str: - """ - Generate a default name for the object. 
- """ - class_name = self.__class__.__name__ - iterator = 0 - while class_name+"_"+str(iterator) in self._borg.map.vertices(): - iterator += 1 - return class_name+"_"+str(iterator) if TYPE_CHECKING: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 202c79e1..2cda2194 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -109,7 +109,7 @@ def __init__( if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} if unique_name is None: - unique_name = self._generate_default_name() + unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated @@ -405,16 +405,6 @@ def to_obj_type(self, data_type: Type[Parameter], *kwargs): def __copy__(self): return self.__class__.from_dict(self.as_dict()) - def _generate_default_name(self) -> str: - """ - Generate a default name for the object. - """ - class_name = self.__class__.__name__ - iterator = 0 - while class_name+"_"+str(iterator) in self._borg.map.vertices(): - iterator += 1 - return class_name+"_"+str(iterator) - V = TypeVar('V', bound=Descriptor) diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index 5049a497..3e970dc6 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -149,7 +149,7 @@ def virtualizer(obj: BV) -> BV: virtual_options = { "_is_virtual": True, "is_virtual": property(fget=lambda self: self._is_virtual), - "_derived_from": property(fget=obj.unique_name), + "_derived_from": property(fget=lambda self: obj.unique_name), "__non_virtual_class__": klass, "realize": realizer, "relalize_component": component_realizer, diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index efd6f76a..03864e51 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -117,7 +117,7 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: if self._my_self.unique_name in borg.map.created_internal: # We now have to trace.... 
route = borg.map.reverse_route(self._my_self) # noqa: F841 - index = borg.map.created_objs.index( + index = borg.map.created_internal.index( self._my_self.unique_name ) temp += ( diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 78dda2fc..197dbcae 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -20,6 +20,7 @@ from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter from easyscience.Utils.io.dict import DictSerializer +from easyscience import borg @pytest.fixture @@ -194,6 +195,7 @@ def check_dict(check, item): if isinstance(check, dict) and isinstance(item, dict): if "@module" in item.keys(): with not_raises([ValueError, AttributeError]): + borg.map._clear() this_obj = DictSerializer().decode(item) for key in check.keys(): @@ -222,6 +224,7 @@ def test_baseobj_dir(setup_pars): "get_fit_parameters", "get_parameters", "interface", + "unique_name", "name", "par1", "par2", diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index d292efea..94177f90 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -14,6 +14,7 @@ from easyscience.Objects.ObjectClasses import BaseObj from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter +from easyscience import borg test_dict = { "@module": "easyscience.Objects.Groups", @@ -28,6 +29,7 @@ "name": "par1", "value": 1, "units": "dimensionless", + "unique_name": "BaseCollection_0", "description": "", "url": "", "display_name": "par1", @@ -307,6 +309,7 @@ def test_baseCollection_dir(cls): "constraints", "get_fit_parameters", "append", + "unique_name", "index", "as_dict", "clear", @@ -346,6 +349,10 @@ def check_dict(dict_1: dict, dict_2: dict): del keys_1[keys_1.index("@id")] if "@id" in keys_2: del keys_2[keys_2.index("@id")] + if "unique_name" in keys_1: + del keys_1[keys_1.index("unique_name")] + if "unique_name" in keys_2: + del keys_2[keys_2.index("unique_name")] assert not set(keys_1).difference(set(keys_2)) @@ -442,6 +449,7 @@ def test_baseCollection_iterator_dict(cls): obj = cls(name, *l_object) d = obj.as_dict() + borg.map._clear() obj2 = cls.from_dict(d) for index, item in enumerate(obj2): @@ -549,11 +557,11 @@ def test_basecollectionGraph(cls): name = "test" v = [1, 2] p = [Parameter(f"p{i}", v[i]) for i in range(len(v))] - p_id = [G.convert_id_to_key(_p) for _p in p] + p_id = [_p.unique_name for _p in p] bb = cls(name, *p) - bb_id = G.convert_id_to_key(bb) + bb_id = bb.unique_name b = Beta("b", bb=bb) - b_id = G.convert_id_to_key(b) + b_id = b.unique_name for _id in p_id: assert _id in G.get_edges(bb) assert len(p) == len(G.get_edges(bb)) From 09ac66b8e1b9799906aa417c5d1a0e93dbf424e5 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:41:33 +0200 Subject: [PATCH 13/57] Ruff --- src/easyscience/Objects/Graph.py | 1 + src/easyscience/Objects/Groups.py | 1 - src/easyscience/Objects/ObjectClasses.py | 1 - 3 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index de5dcd92..90977aec 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -9,6 +9,7 @@ import weakref from typing import List + class _EntryList(list): def __init__(self, *args, my_type=None, **kwargs): super(_EntryList, self).__init__(*args, **kwargs) diff --git 
a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 27ae1144..1aecd177 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -17,7 +17,6 @@ from typing import Tuple from typing import Union -from easyscience import borg from easyscience.Objects.ObjectClasses import BasedBase from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Utils.UndoRedo import NotarizedDict diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index ed8a5fd1..ae187961 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -16,7 +16,6 @@ from typing import Optional from typing import Set from typing import TypeVar -from typing import Union from easyscience import borg from easyscience.Utils.classTools import addLoggedProp From 2305785f1420e895c4cfeafa22a8ed094803509f Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:51:01 +0200 Subject: [PATCH 14/57] tox fix --- src/easyscience/Objects/Variable.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 2cda2194..c28b23d6 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -499,8 +499,8 @@ def __init__( name: str, value: Union[numbers.Number, np.ndarray], error: Optional[Union[numbers.Number, np.ndarray]] = 0.0, - min: Optional[numbers.Number] = -np.Inf, - max: Optional[numbers.Number] = np.Inf, + min: Optional[numbers.Number] = -np.inf, + max: Optional[numbers.Number] = np.inf, fixed: Optional[bool] = False, **kwargs, ): From c6a7224df6f657d6547f3d7a2766f29cddd5cfac Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:58:56 +0200 Subject: [PATCH 15/57] fix tox errors --- pyproject.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 76304217..eeab60d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,10 +34,11 @@ dependencies = [ "bumps", "DFO-LS", "lmfit", - "numpy", + "numpy==1.26", # Should be updated to numpy 2.0 "pint", "uncertainties", - "xarray" + "xarray", + "pint==0.23" # Only to ensure that unit is reported as dimensionless rather than empty string ] [project.optional-dependencies] From 473a844dc7ea5b7974c291bf5c7823da94054281 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Mon, 1 Jul 2024 14:09:55 +0200 Subject: [PATCH 16/57] Update map on unique_name change + code cleanup --- src/easyscience/Objects/Graph.py | 9 ++++---- src/easyscience/Objects/ObjectClasses.py | 17 ++++++++++++++-- src/easyscience/Objects/Variable.py | 26 ++++++++++++++++-------- 3 files changed, 37 insertions(+), 15 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 90977aec..3a5291e1 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -111,12 +111,11 @@ def _nested_get(self, obj_type: str) -> List[str]: def _get_name_iterator(self, class_name: str) -> int: """Get the iterator for the name generator for a class""" - if class_name in self._name_iterator_dict.keys(): - self._name_iterator_dict[class_name] += 1 - return self._name_iterator_dict[class_name] - 1 + if class_name not in self._name_iterator_dict.keys(): + self._name_iterator_dict[class_name] = 0 else: - self._name_iterator_dict[class_name] = 1 - return 0 + self._name_iterator_dict[class_name] += 1 + return 
self._name_iterator_dict[class_name] def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index ae187961..97208c64 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -38,7 +38,7 @@ class BasedBase(ComponentSerializer): def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): self._borg = borg if unique_name is None: - unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) + unique_name = self._unique_name_generator() self._unique_name = unique_name self._name = name self._borg.map.add_vertex(self, obj_type="created") @@ -71,8 +71,13 @@ def unique_name(self) -> str: @unique_name.setter def unique_name(self, new_unique_name: str): - """ Set a new unique name for the object. The old name is still kept in the map. """ + """ Set a new unique name for the object. The old name is still kept in the map. + + :param new_unique_name: New unique name for the object""" + if not isinstance(new_unique_name, str): + raise TypeError("Unique name has to be a string.") self._unique_name = new_unique_name + self._borg.map.add_vertex(self) @property def name(self) -> str: @@ -198,6 +203,14 @@ def get_fit_parameters(self) -> List[Parameter]: fit_list.append(item) return fit_list + def _unique_name_generator(self) -> str: + """ + Generate a generic unique name for the object using the class name and a global iterator. + """ + class_name = self.__class__.__name__ + iterator_string = str(self._borg.map._get_name_iterator(class_name)) + return class_name + "_" + iterator_string + def __dir__(self) -> Iterable[str]: """ This creates auto-completion and helps out in iPython notebooks. diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index c28b23d6..e55a8e7d 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -109,7 +109,7 @@ def __init__( if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} if unique_name is None: - unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) + unique_name = self._unique_name_generator() self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated @@ -192,13 +192,14 @@ def unique_name(self) -> str: return self._unique_name @unique_name.setter - def unique_name(self, name: str): - """ - Set the unique name of this object. - - :param name: Unique name of this object - """ - self._unique_name = name + def unique_name(self, new_unique_name: str): + """ Set a new unique name for the object. The old name is still kept in the map. + + :param new_unique_name: New unique name for the object""" + if not isinstance(new_unique_name, str): + raise TypeError("Unique name has to be a string.") + self._unique_name = new_unique_name + self._borg.map.add_vertex(self) @property def display_name(self) -> str: @@ -362,6 +363,15 @@ def convert_unit(self, unit_str: str): self._args['value'] = self.raw_value self._args['units'] = str(self.unit) + def _unique_name_generator(self) -> str: + """ + Generate a generic unique name for the object using the class name and a global iterator. 
+ """ + class_name = self.__class__.__name__ + iterator_string = str(self._borg.map._get_name_iterator(class_name)) + return class_name + "_" + iterator_string + + # @cached_property @property def compatible_units(self) -> List[str]: From 0453369d51867e96f564747f8a026a7584da82d0 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Mon, 1 Jul 2024 16:26:05 +0200 Subject: [PATCH 17/57] Clear graph unittest --- src/easyscience/Objects/Graph.py | 2 ++ tests/unit_tests/Objects/test_graph.py | 25 ++++++++++++++++++++++--- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 3a5291e1..734bf147 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -295,8 +295,10 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return False def _clear(self): + """ Reset the graph to an empty state. """ self._store = weakref.WeakValueDictionary() self.__graph_dict = {} + self._name_iterator_dict = {} def __repr__(self) -> str: return f"Graph object of {len(self._store)} vertices." diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 2dbe6915..207b7a59 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -1,7 +1,26 @@ -__author__ = "github.com/wardsimon" -__version__ = "0.1.0" - # SPDX-FileCopyrightText: 2023 EasyScience contributors # SPDX-License-Identifier: BSD-3-Clause # © 2021-2023 Contributors to the EasyScience project Date: Tue, 2 Jul 2024 10:09:52 +0200 Subject: [PATCH 18/57] test_add_vertex --- tests/unit_tests/Objects/test_graph.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 207b7a59..92f8e9cd 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -13,8 +13,7 @@ class TestGraph: def clear(self): borg.map._clear() - def test_clear(self): - borg.map._clear() + def test_clear(self, clear): test_obj = BaseObj("test") assert len(borg.map._store) == 1 assert len(borg.map._Graph__graph_dict) == 1 @@ -24,3 +23,13 @@ def test_clear(self): assert borg.map._Graph__graph_dict == {} assert borg.map._name_iterator_dict == {} + def test_add_vertex(self, clear): + test_obj = BaseObj("test") + assert len(borg.map._store) == 1 + assert len(borg.map._Graph__graph_dict) == 1 + assert borg.map._name_iterator_dict == {"BaseObj": 0} + + @pytest.mark.parametrize("name", ["test", "test2", "test3"]) + def test_clear_fixture(self, name, clear): + test_obj= BaseObj(name, unique_name=name) + assert len(borg.map._store) == 1 \ No newline at end of file From 8d39a97c90db0262fc3eb60dc2d4c46162c6d40b Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 2 Jul 2024 10:20:15 +0200 Subject: [PATCH 19/57] Rename borg to global_object --- examples_old/example4.py | 8 +- examples_old/example5_broken.py | 10 +- examples_old/example6_broken.py | 8 +- src/easyscience/Fitting/Constraints.py | 12 +-- src/easyscience/Fitting/DFO_LS.py | 16 +-- src/easyscience/Fitting/Fitting.py | 4 +- src/easyscience/Fitting/bumps.py | 16 +-- src/easyscience/Fitting/lmfit.py | 16 +-- src/easyscience/Objects/Groups.py | 16 +-- src/easyscience/Objects/ObjectClasses.py | 32 +++--- src/easyscience/Objects/Variable.py | 46 ++++----- .../Objects/{Borg.py => global_object.py} | 4 +- src/easyscience/Objects/virtual.py | 14 +-- src/easyscience/Utils/Hugger/Property.py 
| 60 ++++++------ src/easyscience/Utils/UndoRedo.py | 12 +-- src/easyscience/Utils/classTools.py | 6 +- src/easyscience/Utils/decorators.py | 4 +- src/easyscience/Utils/io/template.py | 2 +- src/easyscience/__init__.py | 8 +- tests/integration_tests/test_undoRedo.py | 98 +++++++++---------- tests/unit_tests/Objects/test_BaseObj.py | 10 +- .../Objects/test_Descriptor_Parameter.py | 4 +- tests/unit_tests/Objects/test_Groups.py | 12 +-- tests/unit_tests/Objects/test_graph.py | 26 ++--- tests/unit_tests/utils/io_tests/test_dict.py | 8 +- tests/unit_tests/utils/io_tests/test_json.py | 6 +- tests/unit_tests/utils/io_tests/test_xml.py | 4 +- 27 files changed, 231 insertions(+), 231 deletions(-) rename src/easyscience/Objects/{Borg.py => global_object.py} (91%) diff --git a/examples_old/example4.py b/examples_old/example4.py index f5858f69..ff279401 100644 --- a/examples_old/example4.py +++ b/examples_old/example4.py @@ -9,7 +9,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience.Fitting.Fitting import Fitter from easyscience.Objects.core import ComponentSerializer from easyscience.Objects.ObjectClasses import BaseObj @@ -84,7 +84,7 @@ class InterfaceTemplate(ComponentSerializer, metaclass=ABCMeta): """ _interfaces = [] - _borg = borg + _global_object = global_object def __init_subclass__(cls, is_abstract: bool = False, **kwargs): """ @@ -171,7 +171,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface1: Value of {value_label} set to {value}") setattr(self.calculator, value_label, value) @@ -224,7 +224,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface2: Value of {value_label} set to {value}") self._data = json.loads(self.calculator.export_data()) if value_label in self._data.keys(): diff --git a/examples_old/example5_broken.py b/examples_old/example5_broken.py index 3ac38a9d..d968f8bd 100644 --- a/examples_old/example5_broken.py +++ b/examples_old/example5_broken.py @@ -8,7 +8,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience.Fitting.Fitting import Fitter from easyscience.Objects.Base import BaseObj from easyscience.Objects.Base import Parameter @@ -86,7 +86,7 @@ class InterfaceTemplate(ComponentSerializer, metaclass=ABCMeta): """ _interfaces = [] - _borg = borg + _global_object = global_object def __init_subclass__(cls, is_abstract: bool = False, **kwargs): """ @@ -173,7 +173,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface1: Value of {value_label} set to {value}") setattr(self.calculator, value_label, value) @@ -226,7 +226,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface2: Value of {value_label} set to {value}") self._data = json.loads(self.calculator.export_data()) if value_label in self._data.keys(): @@ -338,7 +338,7 @@ def __repr__(self): return f"Line: m={self.m}, c={self.c}" -borg.debug = True +global_object.debug = True interface = InterfaceFactory() line = Line(interface_factory=interface) diff --git a/examples_old/example6_broken.py b/examples_old/example6_broken.py index e3a1183f..f2dde1fe 100644 --- 
a/examples_old/example6_broken.py +++ b/examples_old/example6_broken.py @@ -8,7 +8,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience.Fitting.Fitting import Fitter from easyscience.Objects.Base import BaseObj from easyscience.Objects.Base import Parameter @@ -92,7 +92,7 @@ class InterfaceTemplate(ComponentSerializer, metaclass=ABCMeta): """ _interfaces = [] - _borg = borg + _global_object = global_object def __init_subclass__(cls, is_abstract: bool = False, **kwargs): """ @@ -183,7 +183,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface1: Value of {value_label} set to {value}") setattr(self.calculator, value_label, value) @@ -264,7 +264,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface2: Value of {value_label} set to {value}") self._data = json.loads(self.calculator.export_data()) if value_label in self._data.keys(): diff --git a/src/easyscience/Fitting/Constraints.py b/src/easyscience/Fitting/Constraints.py index 46a29c43..ce1c1aec 100644 --- a/src/easyscience/Fitting/Constraints.py +++ b/src/easyscience/Fitting/Constraints.py @@ -21,7 +21,7 @@ import numpy as np from asteval import Interpreter -from easyscience import borg +from easyscience import global_object from easyscience.Objects.core import ComponentSerializer if TYPE_CHECKING: @@ -33,7 +33,7 @@ class ConstraintBase(ComponentSerializer, metaclass=ABCMeta): A base class used to describe a constraint to be applied to EasyScience base objects. """ - _borg = borg + _global_object = global_object def __init__( self, @@ -62,7 +62,7 @@ def __init__( if dependent_obj.__class__.__name__ == 'Parameter': if not dependent_obj.enabled: raise AssertionError('A dependent object needs to be initially enabled.') - if borg.debug: + if global_object.debug: print(f'Dependent variable {dependent_obj}. It should be a `Descriptor`.' f'Setting to fixed') dependent_obj.enabled = False self._finalizer = weakref.finalize(self, cleanup_constraint, self.dependent_obj_ids, True) @@ -154,7 +154,7 @@ def get_obj(self, key: int) -> V: :param key: an EasyScience objects unique key :return: EasyScience object """ - return self._borg.map.get_item_by_key(key) + return self._global_object.map.get_item_by_key(key) C = TypeVar('C', bound=ConstraintBase) @@ -476,8 +476,8 @@ def __repr__(self) -> str: def cleanup_constraint(obj_id: str, enabled: bool): try: - obj = borg.map.get_item_by_key(obj_id) + obj = global_object.map.get_item_by_key(obj_id) obj.enabled = enabled except ValueError: - if borg.debug: + if global_object.debug: print(f'Object with ID {obj_id} has already been deleted') diff --git a/src/easyscience/Fitting/DFO_LS.py b/src/easyscience/Fitting/DFO_LS.py index f9219a59..7d6f67ad 100644 --- a/src/easyscience/Fitting/DFO_LS.py +++ b/src/easyscience/Fitting/DFO_LS.py @@ -170,11 +170,11 @@ def fit( for key in self._cached_pars.keys() } - # Why do we do this? Because a fitting template has to have borg instantiated outside pre-runtime - from easyscience import borg + # Why do we do this? 
Because a fitting template has to have global_object instantiated outside pre-runtime + from easyscience import global_object - stack_status = borg.stack.enabled - borg.stack.enabled = False + stack_status = global_object.stack.enabled + global_object.stack.enabled = False try: model_results = self.dfols_fit(model, **kwargs) @@ -212,15 +212,15 @@ def _set_parameter_fit_result( :return: None :rtype: noneType """ - from easyscience import borg + from easyscience import global_object pars = self._cached_pars if stack_status: for name in pars.keys(): pars[name].value = self._cached_pars_vals[name][0] pars[name].error = self._cached_pars_vals[name][1] - borg.stack.enabled = True - borg.stack.beginMacro("Fitting routine") + global_object.stack.enabled = True + global_object.stack.beginMacro("Fitting routine") error_matrix = self._error_from_jacobian( fit_result.jacobian, fit_result.resid, ci @@ -230,7 +230,7 @@ def _set_parameter_fit_result( par.error = error_matrix[idx, idx] if stack_status: - borg.stack.endMacro() + global_object.stack.endMacro() def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: """ diff --git a/src/easyscience/Fitting/Fitting.py b/src/easyscience/Fitting/Fitting.py index 7cb8ac10..3ffaad77 100644 --- a/src/easyscience/Fitting/Fitting.py +++ b/src/easyscience/Fitting/Fitting.py @@ -19,7 +19,7 @@ import numpy as np import easyscience.Fitting as Fitting -from easyscience import borg +from easyscience import global_object from easyscience import default_fitting_engine from easyscience.Objects.Groups import BaseCollection @@ -36,7 +36,7 @@ class Fitter: Wrapper to the fitting engines """ - _borg = borg + _global_object = global_object def __init__(self, fit_object: Optional[B] = None, fit_function: Optional[Callable] = None): self._fit_object = fit_object diff --git a/src/easyscience/Fitting/bumps.py b/src/easyscience/Fitting/bumps.py index dcdc89b7..a99a7b2c 100644 --- a/src/easyscience/Fitting/bumps.py +++ b/src/easyscience/Fitting/bumps.py @@ -199,11 +199,11 @@ def fit( for key in self._cached_pars.keys() } problem = FitProblem(model) - # Why do we do this? Because a fitting template has to have borg instantiated outside pre-runtime - from easyscience import borg + # Why do we do this? 
Because a fitting template has to have global_object instantiated outside pre-runtime + from easyscience import global_object - stack_status = borg.stack.enabled - borg.stack.enabled = False + stack_status = global_object.stack.enabled + global_object.stack.enabled = False try: model_results = bumps_fit( @@ -258,7 +258,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status: bool): :return: None :rtype: noneType """ - from easyscience import borg + from easyscience import global_object pars = self._cached_pars @@ -266,15 +266,15 @@ def _set_parameter_fit_result(self, fit_result, stack_status: bool): for name in pars.keys(): pars[name].value = self._cached_pars_vals[name][0] pars[name].error = self._cached_pars_vals[name][1] - borg.stack.enabled = True - borg.stack.beginMacro("Fitting routine") + global_object.stack.enabled = True + global_object.stack.beginMacro("Fitting routine") for index, name in enumerate(self._cached_model._pnames): dict_name = name[1:] pars[dict_name].value = fit_result.x[index] pars[dict_name].error = fit_result.dx[index] if stack_status: - borg.stack.endMacro() + global_object.stack.endMacro() def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: """ diff --git a/src/easyscience/Fitting/lmfit.py b/src/easyscience/Fitting/lmfit.py index cbc12721..0602f53f 100644 --- a/src/easyscience/Fitting/lmfit.py +++ b/src/easyscience/Fitting/lmfit.py @@ -193,11 +193,11 @@ def fit( minimizer_kwargs = {"fit_kws": minimizer_kwargs} minimizer_kwargs.update(engine_kwargs) - # Why do we do this? Because a fitting template has to have borg instantiated outside pre-runtime - from easyscience import borg + # Why do we do this? Because a fitting template has to have global_object instantiated outside pre-runtime + from easyscience import global_object - stack_status = borg.stack.enabled - borg.stack.enabled = False + stack_status = global_object.stack.enabled + global_object.stack.enabled = False try: if model is None: @@ -257,15 +257,15 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) :return: None :rtype: noneType """ - from easyscience import borg + from easyscience import global_object pars = self._cached_pars if stack_status: for name in pars.keys(): pars[name].value = self._cached_pars_vals[name][0] pars[name].error = self._cached_pars_vals[name][1] - borg.stack.enabled = True - borg.stack.beginMacro("Fitting routine") + global_object.stack.enabled = True + global_object.stack.beginMacro("Fitting routine") for name in pars.keys(): pars[name].value = fit_result.params["p" + str(name)].value if fit_result.errorbars: @@ -273,7 +273,7 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) else: pars[name].error = 0.0 if stack_status: - borg.stack.endMacro() + global_object.stack.endMacro() def _gen_fit_results(self, fit_results: ModelResult, **kwargs) -> FitResults: """ diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 1aecd177..896e9dad 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -83,8 +83,8 @@ def __init__( for key in kwargs.keys(): if key in self.__dict__.keys() or key in self.__slots__: raise AttributeError(f'Given kwarg: `{key}`, is an internal attribute. 
Please rename.') - self._borg.map.add_edge(self, kwargs[key]) - self._borg.map.reset_type(kwargs[key], 'created_internal') + self._global_object.map.add_edge(self, kwargs[key]) + self._global_object.map.reset_type(kwargs[key], 'created_internal') if interface is not None: kwargs[key].interface = interface # TODO wrap getter and setter in Logger @@ -113,8 +113,8 @@ def insert(self, index: int, value: Union[V, B]) -> None: values.insert(index, value) self._kwargs.reorder(**{k: v for k, v in zip(update_key, values)}) # ADD EDGE - self._borg.map.add_edge(self, value) - self._borg.map.reset_type(value, 'created_internal') + self._global_object.map.add_edge(self, value) + self._global_object.map.reset_type(value, 'created_internal') value.interface = self.interface else: raise AttributeError('Only EasyScience objects can be put into an EasyScience group') @@ -173,11 +173,11 @@ def __setitem__(self, key: int, value: Union[B, V]) -> None: update_dict = {update_key[key]: value} self._kwargs.update(update_dict) # ADD EDGE - self._borg.map.add_edge(self, value) - self._borg.map.reset_type(value, 'created_internal') + self._global_object.map.add_edge(self, value) + self._global_object.map.reset_type(value, 'created_internal') value.interface = self.interface # REMOVE EDGE - self._borg.map.prune_vertex_from_edge(self, old_item) + self._global_object.map.prune_vertex_from_edge(self, old_item) else: raise NotImplementedError('At the moment only numerical values or EasyScience objects can be set.') @@ -192,7 +192,7 @@ def __delitem__(self, key: int) -> None: """ keys = list(self._kwargs.keys()) item = self._kwargs[keys[key]] - self._borg.map.prune_vertex_from_edge(self, item) + self._global_object.map.prune_vertex_from_edge(self, item) del self._kwargs[keys[key]] def __len__(self) -> int: diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 97208c64..10500613 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -17,7 +17,7 @@ from typing import Set from typing import TypeVar -from easyscience import borg +from easyscience import global_object from easyscience.Utils.classTools import addLoggedProp from .core import ComponentSerializer @@ -31,17 +31,17 @@ class BasedBase(ComponentSerializer): - __slots__ = ["_name", "_borg", "user_data", "_kwargs"] + __slots__ = ["_name", "_global_object", "user_data", "_kwargs"] _REDIRECT = {} def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): - self._borg = borg + self._global_object = global_object if unique_name is None: unique_name = self._unique_name_generator() self._unique_name = unique_name self._name = name - self._borg.map.add_vertex(self, obj_type="created") + self._global_object.map.add_vertex(self, obj_type="created") self.interface = interface self.user_data: dict = {} @@ -77,7 +77,7 @@ def unique_name(self, new_unique_name: str): if not isinstance(new_unique_name, str): raise TypeError("Unique name has to be a string.") self._unique_name = new_unique_name - self._borg.map.add_vertex(self) + self._global_object.map.add_vertex(self) @property def name(self) -> str: @@ -132,11 +132,11 @@ def generate_bindings(self): ) interfaceable_children = [ key - for key in self._borg.map.get_edges(self) - if issubclass(type(self._borg.map.get_item_by_key(key)), BasedBase) + for key in self._global_object.map.get_edges(self) + if issubclass(type(self._global_object.map.get_item_by_key(key)), BasedBase) ] for child_key in 
interfaceable_children: - child = self._borg.map.get_item_by_key(child_key) + child = self._global_object.map.get_item_by_key(child_key) child.interface = self.interface self.interface.generate_bindings(self) @@ -208,7 +208,7 @@ def _unique_name_generator(self) -> str: Generate a generic unique name for the object using the class name and a global iterator. """ class_name = self.__class__.__name__ - iterator_string = str(self._borg.map._get_name_iterator(class_name)) + iterator_string = str(self._global_object.map._get_name_iterator(class_name)) return class_name + "_" + iterator_string def __dir__(self) -> Iterable[str]: @@ -262,8 +262,8 @@ def __init__( if issubclass( type(kwargs[key]), (BasedBase, Descriptor) ) or "BaseCollection" in [c.__name__ for c in type(kwargs[key]).__bases__]: - self._borg.map.add_edge(self, kwargs[key]) - self._borg.map.reset_type(kwargs[key], "created_internal") + self._global_object.map.add_edge(self, kwargs[key]) + self._global_object.map.reset_type(kwargs[key], "created_internal") addLoggedProp( self, key, @@ -296,8 +296,8 @@ def __init__(self, foo: Parameter, bar: Parameter): :return: None """ self._kwargs[key] = component - self._borg.map.add_edge(self, component) - self._borg.map.reset_type(component, "created_internal") + self._global_object.map.add_edge(self, component) + self._global_object.map.reset_type(component, "created_internal") addLoggedProp( self, key, @@ -322,13 +322,13 @@ def __setattr__(self, key: str, value: BV) -> None: ): if issubclass(type(getattr(self, key, None)), (BasedBase, Descriptor)): old_obj = self.__getattribute__(key) - self._borg.map.prune_vertex_from_edge(self, old_obj) + self._global_object.map.prune_vertex_from_edge(self, old_obj) self._add_component(key, value) else: if hasattr(self, key) and issubclass(type(value), (BasedBase, Descriptor)): old_obj = self.__getattribute__(key) - self._borg.map.prune_vertex_from_edge(self, old_obj) - self._borg.map.add_edge(self, value) + self._global_object.map.prune_vertex_from_edge(self, old_obj) + self._global_object.map.add_edge(self, value) super(BaseObj, self).__setattr__(key, value) # Update the interface bindings if something changed (BasedBase and Descriptor) if old_obj is not None: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index e55a8e7d..da9b60ed 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -27,7 +27,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience import pint from easyscience import ureg from easyscience.Fitting.Constraints import SelfConstraint @@ -55,7 +55,7 @@ class Descriptor(ComponentSerializer): """ _constructor = Q_ - _borg = borg + _global_object = global_object _REDIRECT = { 'value': lambda obj: obj.raw_value, 'units': lambda obj: obj._args['units'], @@ -113,10 +113,10 @@ def __init__( self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated - self._borg.map.add_vertex(self, obj_type='created') + self._global_object.map.add_vertex(self, obj_type='created') # Make the connection between self and parent if parent is not None: - self._borg.map.add_edge(parent, self) + self._global_object.map.add_edge(parent, self) # Attach units if necessary if isinstance(units, ureg.Unit): @@ -199,7 +199,7 @@ def unique_name(self, new_unique_name: str): if not isinstance(new_unique_name, str): raise TypeError("Unique name has to be a string.") self._unique_name = new_unique_name - 
self._borg.map.add_vertex(self) + self._global_object.map.add_vertex(self) @property def display_name(self) -> str: @@ -305,7 +305,7 @@ def value(self, value: Any): :return: None """ if not self.enabled: - if borg.debug: + if global_object.debug: raise CoreSetException(f'{str(self)} is not enabled.') return self.__deepValueSetter(value) @@ -368,7 +368,7 @@ def _unique_name_generator(self) -> str: Generate a generic unique name for the object using the class name and a global iterator. """ class_name = self.__class__.__name__ - iterator_string = str(self._borg.map._get_name_iterator(class_name)) + iterator_string = str(self._global_object.map._get_name_iterator(class_name)) return class_name + "_" + iterator_string @@ -464,15 +464,15 @@ def _property_value(self, set_value: Union[numbers.Number, np.ndarray, Q_]): set_value = set_value.magnitude # Save the old state and create the new state old_value = self._value - state = self._borg.stack.enabled + state = self._global_object.stack.enabled if state: - self._borg.stack.force_state(False) + self._global_object.stack.force_state(False) try: new_value = old_value if set_value in self.available_options: new_value = set_value finally: - self._borg.stack.force_state(state) + self._global_object.stack.force_state(state) # Restore to the old state self.__previous_set(self, new_value) @@ -619,13 +619,13 @@ def _property_value(self, set_value: Union[numbers.Number, np.ndarray, M_]) -> N new_value = self.__constraint_runner(constraint_type, set_value) # Then run any user constraints. constraint_type: dict = self.user_constraints - state = self._borg.stack.enabled + state = self._global_object.stack.enabled if state: - self._borg.stack.force_state(False) + self._global_object.stack.force_state(False) try: new_value = self.__constraint_runner(constraint_type, new_value) finally: - self._borg.stack.force_state(state) + self._global_object.stack.force_state(state) # And finally update any virtual constraints constraint_type: dict = self._constraints['virtual'] @@ -720,9 +720,9 @@ def fixed(self, value: bool): :return: None """ if not self.enabled: - if self._borg.stack.enabled: - self._borg.stack.pop() - if borg.debug: + if self._global_object.stack.enabled: + self._global_object.stack.pop() + if global_object.debug: raise CoreSetException(f'{str(self)} is not enabled.') return # TODO Should we try and cast value to bool rather than throw ValueError? @@ -812,13 +812,13 @@ def _quick_set( # Then run any user constraints. 
if run_user_constraints: constraint_type: dict = self.user_constraints - state = self._borg.stack.enabled + state = self._global_object.stack.enabled if state: - self._borg.stack.force_state(False) + self._global_object.stack.force_state(False) try: set_value = self.__constraint_runner(constraint_type, set_value) finally: - self._borg.stack.force_state(state) + self._global_object.stack.force_state(state) if run_virtual_constraints: # And finally update any virtual constraints constraint_type: dict = self._constraints['virtual'] @@ -841,7 +841,7 @@ def __constraint_runner( continue this_new_value = constraint(no_set=True) if this_new_value != newer_value: - if borg.debug: + if global_object.debug: print(f'Constraint `{constraint}` has been applied') self._value = self.__class__._constructor( value=this_new_value, @@ -870,8 +870,8 @@ def bounds(self, new_bound: Union[Tuple[numbers.Number, numbers.Number], numbers """ # Macro checking and opening for undo/redo close_macro = False - if self._borg.stack.enabled: - self._borg.stack.beginMacro('Setting bounds') + if self._global_object.stack.enabled: + self._global_object.stack.beginMacro('Setting bounds') close_macro = True # Have we only been given a single number (MIN)? if isinstance(new_bound, numbers.Number): @@ -891,4 +891,4 @@ def bounds(self, new_bound: Union[Tuple[numbers.Number, numbers.Number], numbers self.fixed = False # Close the macro if we opened it if close_macro: - self._borg.stack.endMacro() + self._global_object.stack.endMacro() diff --git a/src/easyscience/Objects/Borg.py b/src/easyscience/Objects/global_object.py similarity index 91% rename from src/easyscience/Objects/Borg.py rename to src/easyscience/Objects/global_object.py index c926c3b9..f936447e 100644 --- a/src/easyscience/Objects/Borg.py +++ b/src/easyscience/Objects/global_object.py @@ -12,9 +12,9 @@ @singleton -class Borg: +class GlobalObject: """ - Borg is the assimilated knowledge of `EasyScience`. Every class based on `EasyScience` gets brought + GlobalObject is the assimilated knowledge of `EasyScience`. Every class based on `EasyScience` gets brought into the collective. 
""" diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index 3e970dc6..b0c8b492 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -14,7 +14,7 @@ from typing import Iterable from typing import MutableSequence -from easyscience import borg +from easyscience import global_object from easyscience.Fitting.Constraints import ObjConstraint if TYPE_CHECKING: @@ -28,7 +28,7 @@ def raise_(ex): def _remover(a_obj_id: str, v_obj_id: str): try: # Try to get parent object (might be deleted) - a_obj = borg.map.get_item_by_key(a_obj_id) + a_obj = global_object.map.get_item_by_key(a_obj_id) except ValueError: return if a_obj._constraints["virtual"].get(v_obj_id, False): @@ -100,15 +100,15 @@ def component_realizer(obj: BV, component: str, recursive: bool = True): in ec_var.__dict__.values() ): continue - component._borg.map.prune_vertex_from_edge( + component._global_object.map.prune_vertex_from_edge( component, component._kwargs[key] ) - component._borg.map.add_edge(component, old_component._kwargs[key]) + component._global_object.map.add_edge(component, old_component._kwargs[key]) component._kwargs[key] = old_component._kwargs[key] done_mapping = False if done_mapping: - obj._borg.map.prune_vertex_from_edge(obj, old_component) - obj._borg.map.add_edge(obj, new_components) + obj._global_object.map.prune_vertex_from_edge(obj, old_component) + obj._global_object.map.add_edge(obj, new_components) obj._kwargs[component] = new_components @@ -128,7 +128,7 @@ def virtualizer(obj: BV) -> BV: # First check if we're already a virtual object if getattr(obj, "_is_virtual", False): new_obj = deepcopy(obj) - old_obj = obj._borg.map.get_item_by_key(obj._derived_from) + old_obj = obj._global_object.map.get_item_by_key(obj._derived_from) constraint = ObjConstraint(new_obj, "", old_obj) constraint.external = True old_obj._constraints["virtual"][ diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index 03864e51..daad8458 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -11,7 +11,7 @@ from typing import Callable from typing import List -from easyscience import borg +from easyscience import global_object from easyscience.Utils.Hugger.Hugger import PatcherFactory from easyscience.Utils.Hugger.Hugger import Store @@ -24,7 +24,7 @@ class LoggedProperty(property): `BaseObj`. 
""" - _borg = borg + _global_object = global_object def __init__(self, *args, get_id=None, my_self=None, test_class=None, **kwargs): super(LoggedProperty, self).__init__(*args, **kwargs) @@ -52,7 +52,7 @@ def stack_(frame): return test def __get__(self, instance, owner=None): - if not borg.script.enabled: + if not global_object.script.enabled: return super(LoggedProperty, self).__get__(instance, owner) test = self._caller_class(self.test_class) res = super(LoggedProperty, self).__get__(instance, owner) @@ -60,10 +60,10 @@ def __get__(self, instance, owner=None): def result_item(item_to_be_resulted): if item_to_be_resulted is None: return None - if borg.map.is_known(item_to_be_resulted): - borg.map.change_type(item_to_be_resulted, "returned") + if global_object.map.is_known(item_to_be_resulted): + global_object.map.change_type(item_to_be_resulted, "returned") else: - borg.map.add_vertex(item_to_be_resulted, obj_type="returned") + global_object.map.add_vertex(item_to_be_resulted, obj_type="returned") if not test and self._get_id is not None and self._my_self is not None: if not isinstance(res, list): @@ -72,19 +72,19 @@ def result_item(item_to_be_resulted): for item in res: result_item(item) Store().append_log(self.makeEntry("get", res)) - if borg.debug: # noqa: S1006 + if global_object.debug: # noqa: S1006 print( f"I'm {self._my_self} and {self._get_id} has been called from the outside!" ) return res def __set__(self, instance, value): - if not borg.script.enabled: + if not global_object.script.enabled: return super().__set__(instance, value) test = self._caller_class(self.test_class) if not test and self._get_id is not None and self._my_self is not None: Store().append_log(self.makeEntry("set", value)) - if borg.debug: # noqa: S1006 + if global_object.debug: # noqa: S1006 print( f"I'm {self._my_self} and {self._get_id} has been set to {value} from the outside!" ) @@ -98,51 +98,51 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: returns = [returns] if log_type == "get": for var in returns: - if var.unique_name in borg.map.returned_objs: - index = borg.map.returned_objs.index( + if var.unique_name in global_object.map.returned_objs: + index = global_object.map.returned_objs.index( var.unique_name ) temp += f"{Store().var_ident}{index}, " if len(returns) > 0: temp = temp[:-2] temp += " = " - if self._my_self.unique_name in borg.map.created_objs: + if self._my_self.unique_name in global_object.map.created_objs: # for edge in route[::-1]: - index = borg.map.created_objs.index( + index = global_object.map.created_objs.index( self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) - if self._my_self.unique_name in borg.map.created_internal: + if self._my_self.unique_name in global_object.map.created_internal: # We now have to trace.... 
- route = borg.map.reverse_route(self._my_self) # noqa: F841 - index = borg.map.created_internal.index( + route = global_object.map.reverse_route(self._my_self) # noqa: F841 + index = global_object.map.created_internal.index( self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) elif log_type == "set": - if self._my_self.unique_name in borg.map.created_objs: - index = borg.map.created_objs.index( + if self._my_self.unique_name in global_object.map.created_objs: + index = global_object.map.created_objs.index( self._my_self.unique_name ) temp += f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id} = " args = args[1:] for var in args: - if var.unique_name in borg.map.argument_objs: - index = borg.map.argument_objs.index( + if var.unique_name in global_object.map.argument_objs: + index = global_object.map.argument_objs.index( var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.unique_name in borg.map.returned_objs: - index = borg.map.returned_objs.index( + elif var.unique_name in global_object.map.returned_objs: + index = global_object.map.returned_objs.index( var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.unique_name in borg.map.created_objs: - index = borg.map.created_objs.index(var.unique_name) + elif var.unique_name in global_object.map.created_objs: + index = global_object.map.created_objs.index(var.unique_name) temp += f"{self._my_self.__class__.__name__.lower()}_{index}" else: if isinstance(var, str): @@ -158,7 +158,7 @@ class PropertyHugger(PatcherFactory): # Properties are immutable, so need to be set at the parent level. However unlike `FunctionHugger` we can't traverse # the stack to get the parent. So, it and it's name has to be set at initialization. Boo! 
- _borg = borg + _global_object = global_object def __init__(self, klass, prop_name): super().__init__() @@ -180,7 +180,7 @@ def patch(self): for key, item in self.__patch_ref.items(): func = getattr(self.property, key) if func is not None: - if borg.debug: + if global_object.debug: print(f"Patching property {self.klass.__name__}.{self.prop_name}") patch_function: Callable = item.get("patcher") new_func = patch_function(func) @@ -188,14 +188,14 @@ def patch(self): setattr(self.klass, self.prop_name, property(**option)) def restore(self): - if borg.debug: + if global_object.debug: print(f"Restoring property {self.klass.__name__}.{self.prop_name}") setattr(self.klass, self.prop_name, self.property) def patch_get(self, func: Callable) -> Callable: @wraps(func) def inner(*args, **kwargs): - if borg.debug: + if global_object.debug: print( f"{self.klass.__name__}.{self.prop_name} has been called with {args[1:]}, {kwargs}" ) @@ -210,7 +210,7 @@ def inner(*args, **kwargs): def patch_set(self, func: Callable) -> Callable: @wraps(func) def inner(*args, **kwargs): - if borg.debug: + if global_object.debug: print( f"{self.klass.__name__}.{self.prop_name} has been set with {args[1:]}, {kwargs}" ) @@ -223,7 +223,7 @@ def inner(*args, **kwargs): def patch_del(self, func: Callable) -> Callable: @wraps(func) def inner(*args, **kwargs): - if borg.debug: + if global_object.debug: print(f"{self.klass.__name__}.{self.prop_name} has been deleted.") self._append_log(self.makeEntry("del", None, *args, **kwargs)) return func(*args, **kwargs) diff --git a/src/easyscience/Utils/UndoRedo.py b/src/easyscience/Utils/UndoRedo.py index 115662fc..6753c3c6 100644 --- a/src/easyscience/Utils/UndoRedo.py +++ b/src/easyscience/Utils/UndoRedo.py @@ -18,7 +18,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object class UndoCommand(metaclass=abc.ABCMeta): @@ -59,9 +59,9 @@ def inner(obj, *args, **kwargs): # Only do the work to a NotarizedDict. 
if hasattr(obj, '_stack_enabled') and obj._stack_enabled: if not kwargs: - borg.stack.push(DictStack(obj, *args)) + global_object.stack.push(DictStack(obj, *args)) else: - borg.stack.push(DictStackReCreate(obj, **kwargs)) + global_object.stack.push(DictStackReCreate(obj, **kwargs)) else: func(obj, *args, **kwargs) @@ -75,7 +75,7 @@ class NotarizedDict(UserDict): def __init__(self, **kwargs): super().__init__(**kwargs) - self._borg = borg + self._global_object = global_object self._stack_enabled = False @classmethod @@ -467,10 +467,10 @@ def wrapper(obj, *args) -> NoReturn: if ret: return - if borg.debug: + if global_object.debug: print(f"I'm {obj} and have been set from {old_value} to {new_value}!") - borg.stack.push(PropertyStack(obj, func, old_value, new_value, **kwargs)) + global_object.stack.push(PropertyStack(obj, func, old_value, new_value, **kwargs)) return functools.update_wrapper(wrapper, func) diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 03412a8f..255d1d13 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -11,7 +11,7 @@ from typing import List from typing import Tuple -from easyscience import borg +from easyscience import global_object from easyscience.Utils.Hugger.Property import LoggedProperty if TYPE_CHECKING: @@ -64,8 +64,8 @@ def generatePath(model_obj: B, skip_first: bool = False) -> Tuple[List[int], Lis model_id = model_obj.unique_name for par in pars: elem = par.unique_name - route = borg.map.reverse_route(elem, model_id) - objs = [getattr(borg.map.get_item_by_key(r), "name") for r in route] + route = global_object.map.reverse_route(elem, model_id) + objs = [getattr(global_object.map.get_item_by_key(r), "name") for r in route] objs.reverse() names.append(".".join(objs[start_idx:])) ids.append(elem.int) diff --git a/src/easyscience/Utils/decorators.py b/src/easyscience/Utils/decorators.py index 72d94f05..89db9d4f 100644 --- a/src/easyscience/Utils/decorators.py +++ b/src/easyscience/Utils/decorators.py @@ -10,7 +10,7 @@ import warnings from time import time -from easyscience import borg +from easyscience import global_object class memoized: @@ -67,7 +67,7 @@ def time_it(func): :return: callable function with timer """ name = func.__module__ + "." + func.__name__ - time_logger = borg.log.getLogger("timer." + name) + time_logger = global_object.log.getLogger("timer." 
+ name) @functools.wraps(func) def _time_it(*args, **kwargs): diff --git a/src/easyscience/Utils/io/template.py b/src/easyscience/Utils/io/template.py index 9f37e905..04975a00 100644 --- a/src/easyscience/Utils/io/template.py +++ b/src/easyscience/Utils/io/template.py @@ -222,7 +222,7 @@ def runner(o): d.update({'value': runner(obj.value)}) # pylint: disable=E1101 if hasattr(obj, '_convert_to_dict'): d = obj._convert_to_dict(d, self, skip=skip, **kwargs) - if hasattr(obj, '_borg') and '@id' not in d: + if hasattr(obj, '_global_object') and '@id' not in d: d['@id'] = obj.unique_name return d diff --git a/src/easyscience/__init__.py b/src/easyscience/__init__.py index 91cfb3ac..a6134fc5 100644 --- a/src/easyscience/__init__.py +++ b/src/easyscience/__init__.py @@ -8,11 +8,11 @@ import pint from easyscience.__version__ import __version__ as __version__ -from easyscience.Objects.Borg import Borg +from easyscience.Objects.global_object import GlobalObject default_fitting_engine = 'lmfit' ureg = pint.UnitRegistry() -borg = Borg() -borg.instantiate_stack() -borg.stack.enabled = False +global_object = GlobalObject() +global_object.instantiate_stack() +global_object.stack.enabled = False diff --git a/tests/integration_tests/test_undoRedo.py b/tests/integration_tests/test_undoRedo.py index 747bd08c..37833491 100644 --- a/tests/integration_tests/test_undoRedo.py +++ b/tests/integration_tests/test_undoRedo.py @@ -31,9 +31,9 @@ def createParam(option): def doUndoRedo(obj, attr, future, additional=""): - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True e = False def getter(_obj, _attr): @@ -46,16 +46,16 @@ def getter(_obj, _attr): previous = getter(obj, attr) setattr(obj, attr, future) assert getter(obj, attr) == future - assert borg.stack.canUndo() - borg.stack.undo() + assert global_object.stack.canUndo() + global_object.stack.undo() assert getter(obj, attr) == previous - assert borg.stack.canRedo() - borg.stack.redo() + assert global_object.stack.canRedo() + global_object.stack.redo() assert getter(obj, attr) == future except Exception as err: e = err finally: - borg.stack.enabled = False + global_object.stack.enabled = False return e @@ -92,9 +92,9 @@ def test_SinglesUndoRedo(idx, test): @pytest.mark.parametrize("value", (True, False)) def test_Parameter_Bounds_UndoRedo(value): - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True p = Parameter("test", 1, enabled=value) assert p.min == -np.inf assert p.max == np.inf @@ -106,7 +106,7 @@ def test_Parameter_Bounds_UndoRedo(value): assert p.bounds == (0, 2) assert p.enabled is True - borg.stack.undo() + global_object.stack.undo() assert p.min == -np.inf assert p.max == np.inf assert p.bounds == (-np.inf, np.inf) @@ -137,9 +137,9 @@ def test_BaseCollectionUndoRedo(): # assert not doUndoRedo(obj, 'name', name2) - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True original_length = len(obj) p = Parameter("slip_in", 50) @@ -151,12 +151,12 @@ def test_BaseCollectionUndoRedo(): assert item == obj_r # Test inserting items - borg.stack.undo() + global_object.stack.undo() assert len(obj) == original_length _ = objs.pop(idx) for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.redo() + global_object.stack.redo() assert len(obj) == original_length + 1 objs.insert(idx, p) for item, obj_r in zip(obj, 
objs): @@ -168,13 +168,13 @@ def test_BaseCollectionUndoRedo(): assert len(obj) == original_length for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.undo() + global_object.stack.undo() assert len(obj) == original_length + 1 objs.insert(idx, p) for item, obj_r in zip(obj, objs): assert item == obj_r del objs[idx] - borg.stack.redo() + global_object.stack.redo() assert len(obj) == original_length for item, obj_r in zip(obj, objs): assert item == obj_r @@ -186,45 +186,45 @@ def test_BaseCollectionUndoRedo(): assert len(obj) == original_length for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.undo() + global_object.stack.undo() for i in range(len(obj)): if i == idx: item = old_item else: item = objs[i] assert obj[i] == item - borg.stack.redo() + global_object.stack.redo() for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.enabled = False + global_object.stack.enabled = False def test_UndoRedoMacros(): items = [createSingleObjs(idx) for idx in range(5)] offset = 5 undo_text = "test_macro" - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True - borg.stack.beginMacro(undo_text) + global_object.stack.enabled = True + global_object.stack.beginMacro(undo_text) values = [item.raw_value for item in items] for item, value in zip(items, values): item.value = value + offset - borg.stack.endMacro() + global_object.stack.endMacro() for item, old_value in zip(items, values): assert item.raw_value == old_value + offset - assert borg.stack.undoText() == undo_text + assert global_object.stack.undoText() == undo_text - borg.stack.undo() + global_object.stack.undo() for item, old_value in zip(items, values): assert item.raw_value == old_value - assert borg.stack.redoText() == undo_text + assert global_object.stack.redoText() == undo_text - borg.stack.redo() + global_object.stack.redo() for item, old_value in zip(items, values): assert item.raw_value == old_value + offset @@ -273,21 +273,21 @@ def __call__(self, x: np.ndarray) -> np.ndarray: except AttributeError: pytest.skip(msg=f"{fit_engine} is not installed") - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True res = f.fit(x, y) # assert l1.c.raw_value == pytest.approx(l2.c.raw_value, rel=l2.c.error * 3) # assert l1.m.raw_value == pytest.approx(l2.m.raw_value, rel=l2.m.error * 3) - assert borg.stack.undoText() == "Fitting routine" + assert global_object.stack.undoText() == "Fitting routine" - borg.stack.undo() + global_object.stack.undo() assert l2.m.raw_value == m_sp assert l2.c.raw_value == c_sp - assert borg.stack.redoText() == "Fitting routine" + assert global_object.stack.redoText() == "Fitting routine" - borg.stack.redo() + global_object.stack.redo() assert l2.m.raw_value == res.p[f"p{l2.m.unique_name}"] assert l2.c.raw_value == res.p[f"p{l2.c.unique_name}"] @@ -306,8 +306,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # result_value = f_fun(a, b) # result_error = (sa ** 2 + sb ** 2) ** 0.5 # -# from easyscience import borg -# borg.stack.enabled = True +# from easyscience import global_object +# global_object.stack.enabled = True # # # Perform basic test # p1 = Parameter('a', a) @@ -315,9 +315,9 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # # p1 = p_fun(p1, p2) # assert float(p1) == result_value -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # # 
# Perform basic + error @@ -326,10 +326,10 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # p1 = p_fun(p1, p2) # assert float(p1) == result_value # assert p1.error == result_error -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # @@ -340,11 +340,11 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == 'meter / second' -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa # assert str(p1.unit) == 'meter / second' -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == 'meter / second' @@ -368,8 +368,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # result_value = f_fun(a, b) # result_error = ((sa / a) ** 2 + (sb / b) ** 2) ** 0.5 * result_value # -# from easyscience import borg -# borg.stack.enabled = True +# from easyscience import global_object +# global_object.stack.enabled = True # # # Perform basic test # p1 = Parameter('a', a) @@ -377,9 +377,9 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # # p1 = p_fun(p1, p2) # assert float(p1) == result_value -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # # # Perform basic + error @@ -388,10 +388,10 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # p1 = p_fun(p1, p2) # assert float(p1) == result_value # assert p1.error == result_error -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # @@ -402,11 +402,11 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == u_str -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa # assert str(p1.unit) == unit -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == u_str diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 197dbcae..cf71ccb8 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -20,7 +20,7 @@ from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter from easyscience.Utils.io.dict import DictSerializer -from easyscience import borg +from easyscience import global_object @pytest.fixture @@ -195,7 +195,7 @@ def check_dict(check, item): if isinstance(check, dict) and isinstance(item, dict): if "@module" in item.keys(): with not_raises([ValueError, AttributeError]): - borg.map._clear() + global_object.map._clear() this_obj = DictSerializer().decode(item) for key in check.keys(): @@ -363,7 +363,7 @@ def from_pars(cls, a: float): a_start = 5 a_end = 10 a = A.from_pars(a_start) - graph = a._borg.map + graph = a._global_object.map assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 @@ -404,7 +404,7 @@ def from_pars(cls, a: float): a_start = 5 a_end = 10 a = A.from_pars(a_start) - graph = a._borg.map + graph = 
a._global_object.map assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 @@ -429,7 +429,7 @@ def from_pars(cls, a: float): a_start = 5 a_end = 10 a = A.from_pars(a_start) - graph = a._borg.map + graph = a._global_object.map assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 diff --git a/tests/unit_tests/Objects/test_Descriptor_Parameter.py b/tests/unit_tests/Objects/test_Descriptor_Parameter.py index b8745b0e..477e42b9 100644 --- a/tests/unit_tests/Objects/test_Descriptor_Parameter.py +++ b/tests/unit_tests/Objects/test_Descriptor_Parameter.py @@ -16,7 +16,7 @@ from easyscience.Objects.Variable import CoreSetException from easyscience.Objects.Variable import Descriptor from easyscience.Objects.Variable import Parameter -from easyscience.Objects.Variable import borg +from easyscience.Objects.Variable import global_object from easyscience.Objects.Variable import ureg @@ -132,7 +132,7 @@ def test_Parameter_value_get(element, expected): @pytest.mark.parametrize("enabled", (None, True, False)) @pytest.mark.parametrize("instance", (Descriptor, Parameter), indirect=True) def test_item_value_set(instance, enabled, debug): - borg.debug = debug + global_object.debug = debug set_value = 2 d = instance("test", 1) if enabled is not None: diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index 94177f90..9bda7e7c 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -14,7 +14,7 @@ from easyscience.Objects.ObjectClasses import BaseObj from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter -from easyscience import borg +from easyscience import global_object test_dict = { "@module": "easyscience.Objects.Groups", @@ -449,7 +449,7 @@ def test_baseCollection_iterator_dict(cls): obj = cls(name, *l_object) d = obj.as_dict() - borg.map._clear() + global_object.map._clear() obj2 = cls.from_dict(d) for index, item in enumerate(obj2): @@ -491,7 +491,7 @@ def test_baseCollection_set_index(cls): assert obj[idx] == p2 obj[idx] = p4 assert obj[idx] == p4 - edges = obj._borg.map.get_edges(obj) + edges = obj._global_object.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: assert item.unique_name in edges @@ -515,7 +515,7 @@ def test_baseCollection_set_index_based(cls): assert obj[idx] == p4 obj[idx] = d assert obj[idx] == d - edges = obj._borg.map.get_edges(obj) + edges = obj._global_object.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: assert item.unique_name in edges @@ -551,9 +551,9 @@ class Beta(BaseObj): @pytest.mark.parametrize("cls", class_constructors) def test_basecollectionGraph(cls): - from easyscience import borg + from easyscience import global_object - G = borg.map + G = global_object.map name = "test" v = [1, 2] p = [Parameter(f"p{i}", v[i]) for i in range(len(v))] diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 92f8e9cd..8149d765 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -6,30 +6,30 @@ from easyscience.Objects.Variable import Parameter from easyscience.Objects.ObjectClasses import BaseObj import pytest -from easyscience import borg +from easyscience import global_object class TestGraph: @pytest.fixture def clear(self): - borg.map._clear() + global_object.map._clear() def test_clear(self, clear): test_obj = BaseObj("test") - assert len(borg.map._store) == 1 - assert 
len(borg.map._Graph__graph_dict) == 1 - assert borg.map._name_iterator_dict == {"BaseObj": 0} - borg.map._clear() - assert len(borg.map._store) == 0 - assert borg.map._Graph__graph_dict == {} - assert borg.map._name_iterator_dict == {} + assert len(global_object.map._store) == 1 + assert len(global_object.map._Graph__graph_dict) == 1 + assert global_object.map._name_iterator_dict == {"BaseObj": 0} + global_object.map._clear() + assert len(global_object.map._store) == 0 + assert global_object.map._Graph__graph_dict == {} + assert global_object.map._name_iterator_dict == {} def test_add_vertex(self, clear): test_obj = BaseObj("test") - assert len(borg.map._store) == 1 - assert len(borg.map._Graph__graph_dict) == 1 - assert borg.map._name_iterator_dict == {"BaseObj": 0} + assert len(global_object.map._store) == 1 + assert len(global_object.map._Graph__graph_dict) == 1 + assert global_object.map._name_iterator_dict == {"BaseObj": 0} @pytest.mark.parametrize("name", ["test", "test2", "test3"]) def test_clear_fixture(self, name, clear): test_obj= BaseObj(name, unique_name=name) - assert len(borg.map._store) == 1 \ No newline at end of file + assert len(global_object.map._store) == 1 \ No newline at end of file diff --git a/tests/unit_tests/utils/io_tests/test_dict.py b/tests/unit_tests/utils/io_tests/test_dict.py index 5f4b6b2a..40286e27 100644 --- a/tests/unit_tests/utils/io_tests/test_dict.py +++ b/tests/unit_tests/utils/io_tests/test_dict.py @@ -18,7 +18,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict -from easyscience import borg +from easyscience import global_object def recursive_remove(d, remove_keys: list) -> dict: @@ -249,7 +249,7 @@ def test_custom_class_full_decode_with_numpy(): obj = B(Descriptor("a", 1.0), np.array([1.0, 2.0, 3.0])) full_enc = obj.encode(encoder=DictSerializer, full_encode=True) - borg.map._clear() + global_object.map._clear() obj2 = B.decode(full_enc, decoder=DictSerializer) assert obj.name == obj2.name assert obj.a.raw_value == obj2.a.raw_value @@ -270,7 +270,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) - borg.map._clear() + global_object.map._clear() dec = dp_cls.decode(enc, decoder=DictSerializer) for k in data_dict.keys(): @@ -291,7 +291,7 @@ def test_variable_DictSerializer_from_dict(dp_kwargs: dict, dp_cls: Type[Descrip data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) - borg.map._clear() + global_object.map._clear() dec = dp_cls.from_dict(enc) for k in data_dict.keys(): diff --git a/tests/unit_tests/utils/io_tests/test_json.py b/tests/unit_tests/utils/io_tests/test_json.py index e48787b7..17442c4a 100644 --- a/tests/unit_tests/utils/io_tests/test_json.py +++ b/tests/unit_tests/utils/io_tests/test_json.py @@ -15,7 +15,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict -from easyscience import borg +from easyscience import global_object def recursive_remove(d, remove_keys: list) -> dict: @@ -179,7 +179,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonSerializer) - borg.map._clear() + global_object.map._clear() assert isinstance(enc, str) dec = obj.decode(enc, decoder=JsonSerializer) @@ -201,6 +201,6 @@ def test_variable_DataDictSerializer_decode(dp_kwargs: dict, 
dp_cls: Type[Descri data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonDataSerializer) - borg.map._clear() + global_object.map._clear() with pytest.raises(NotImplementedError): dec = obj.decode(enc, decoder=JsonDataSerializer) diff --git a/tests/unit_tests/utils/io_tests/test_xml.py b/tests/unit_tests/utils/io_tests/test_xml.py index b1d35040..562ceae1 100644 --- a/tests/unit_tests/utils/io_tests/test_xml.py +++ b/tests/unit_tests/utils/io_tests/test_xml.py @@ -14,7 +14,7 @@ from .test_core import Descriptor from .test_core import dp_param_dict from .test_core import skip_dict -from easyscience import borg +from easyscience import global_object def recursive_remove(d, remove_keys: list) -> dict: """ @@ -115,7 +115,7 @@ def test_variable_XMLDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descrip assert isinstance(enc, str) data_xml = ET.XML(enc) assert data_xml.tag == "data" - borg.map._clear() + global_object.map._clear() dec = dp_cls.decode(enc, decoder=XMLSerializer) for k in data_dict.keys(): From 0be9d5217d315affc048ec86968ac055bd673380 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 2 Jul 2024 16:11:19 +0200 Subject: [PATCH 20/57] Restructure sourcecode tree --- src/easyscience/Fitting/Fitting.py | 2 +- src/easyscience/Objects/Groups.py | 2 +- src/easyscience/Objects/Variable.py | 2 +- src/easyscience/Utils/classTools.py | 2 +- src/easyscience/__init__.py | 2 +- src/easyscience/global_object/__init__.py | 4 + .../global_object.py | 13 ++-- .../hugger}/__init__.py | 0 .../hugger/hugger.py} | 0 .../hugger/property.py} | 5 +- .../Logging.py => global_object/logger.py} | 0 .../Graph.py => global_object/map.py} | 78 +++++++++---------- .../undo_redo.py} | 0 tests/unit_tests/Objects/test_graph.py | 10 +-- 14 files changed, 63 insertions(+), 57 deletions(-) create mode 100644 src/easyscience/global_object/__init__.py rename src/easyscience/{Objects => global_object}/global_object.py (83%) rename src/easyscience/{Utils/Hugger => global_object/hugger}/__init__.py (100%) rename src/easyscience/{Utils/Hugger/Hugger.py => global_object/hugger/hugger.py} (100%) rename src/easyscience/{Utils/Hugger/Property.py => global_object/hugger/property.py} (99%) rename src/easyscience/{Utils/Logging.py => global_object/logger.py} (100%) rename src/easyscience/{Objects/Graph.py => global_object/map.py} (81%) rename src/easyscience/{Utils/UndoRedo.py => global_object/undo_redo.py} (100%) diff --git a/src/easyscience/Fitting/Fitting.py b/src/easyscience/Fitting/Fitting.py index 3ffaad77..1496ae93 100644 --- a/src/easyscience/Fitting/Fitting.py +++ b/src/easyscience/Fitting/Fitting.py @@ -19,8 +19,8 @@ import numpy as np import easyscience.Fitting as Fitting -from easyscience import global_object from easyscience import default_fitting_engine +from easyscience import global_object from easyscience.Objects.Groups import BaseCollection _C = TypeVar('_C', bound=ABCMeta) diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 896e9dad..504688e6 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -17,9 +17,9 @@ from typing import Tuple from typing import Union +from easyscience.global_object.undo_redo import NotarizedDict from easyscience.Objects.ObjectClasses import BasedBase from easyscience.Objects.ObjectClasses import Descriptor -from easyscience.Utils.UndoRedo import NotarizedDict if TYPE_CHECKING: from easyscience.Utils.typing import B diff --git a/src/easyscience/Objects/Variable.py 
b/src/easyscience/Objects/Variable.py index da9b60ed..119ece45 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -31,10 +31,10 @@ from easyscience import pint from easyscience import ureg from easyscience.Fitting.Constraints import SelfConstraint +from easyscience.global_object.undo_redo import property_stack_deco from easyscience.Objects.core import ComponentSerializer from easyscience.Utils.classTools import addProp from easyscience.Utils.Exceptions import CoreSetException -from easyscience.Utils.UndoRedo import property_stack_deco if TYPE_CHECKING: from easyscience.Utils.typing import C diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 255d1d13..6e29eae7 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -12,7 +12,7 @@ from typing import Tuple from easyscience import global_object -from easyscience.Utils.Hugger.Property import LoggedProperty +from easyscience.global_object.hugger.property import LoggedProperty if TYPE_CHECKING: from easyscience.Utils.typing import BV diff --git a/src/easyscience/__init__.py b/src/easyscience/__init__.py index a6134fc5..fa95da24 100644 --- a/src/easyscience/__init__.py +++ b/src/easyscience/__init__.py @@ -8,7 +8,7 @@ import pint from easyscience.__version__ import __version__ as __version__ -from easyscience.Objects.global_object import GlobalObject +from easyscience.global_object import GlobalObject default_fitting_engine = 'lmfit' diff --git a/src/easyscience/global_object/__init__.py b/src/easyscience/global_object/__init__.py new file mode 100644 index 00000000..0bea5e61 --- /dev/null +++ b/src/easyscience/global_object/__init__.py @@ -0,0 +1,4 @@ +from .global_object import GlobalObject # noqa: F401 +from .hugger.hugger import ScriptManager # noqa: F401 +from .logger import Logger # noqa: F401 +from .map import Map # noqa: F401 diff --git a/src/easyscience/Objects/global_object.py b/src/easyscience/global_object/global_object.py similarity index 83% rename from src/easyscience/Objects/global_object.py rename to src/easyscience/global_object/global_object.py index f936447e..281115f5 100644 --- a/src/easyscience/Objects/global_object.py +++ b/src/easyscience/global_object/global_object.py @@ -5,10 +5,11 @@ __author__ = "github.com/wardsimon" __version__ = "0.1.0" -from easyscience.Objects.Graph import Graph from easyscience.Utils.classUtils import singleton -from easyscience.Utils.Hugger.Hugger import ScriptManager -from easyscience.Utils.Logging import Logger + +from .hugger.hugger import ScriptManager +from .logger import Logger +from .map import Map @singleton @@ -19,7 +20,7 @@ class GlobalObject: """ __log = Logger() - __map = Graph() + __map = Map() __stack = None __debug = False @@ -33,7 +34,7 @@ def __init__(self): # self.script: ScriptManager = ScriptManager() # Map. 
This is the conduit database between all borg species - self.map: Graph = self.__map + self.map: Map = self.__map def instantiate_stack(self): """ @@ -43,6 +44,6 @@ def instantiate_stack(self): :return: None :rtype: noneType """ - from easyscience.Utils.UndoRedo import UndoStack + from easyscience.global_object.undo_redo import UndoStack self.stack = UndoStack() diff --git a/src/easyscience/Utils/Hugger/__init__.py b/src/easyscience/global_object/hugger/__init__.py similarity index 100% rename from src/easyscience/Utils/Hugger/__init__.py rename to src/easyscience/global_object/hugger/__init__.py diff --git a/src/easyscience/Utils/Hugger/Hugger.py b/src/easyscience/global_object/hugger/hugger.py similarity index 100% rename from src/easyscience/Utils/Hugger/Hugger.py rename to src/easyscience/global_object/hugger/hugger.py diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/global_object/hugger/property.py similarity index 99% rename from src/easyscience/Utils/Hugger/Property.py rename to src/easyscience/global_object/hugger/property.py index daad8458..ac0bfcbd 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/global_object/hugger/property.py @@ -12,8 +12,9 @@ from typing import List from easyscience import global_object -from easyscience.Utils.Hugger.Hugger import PatcherFactory -from easyscience.Utils.Hugger.Hugger import Store + +from .hugger import PatcherFactory +from .hugger import Store class LoggedProperty(property): diff --git a/src/easyscience/Utils/Logging.py b/src/easyscience/global_object/logger.py similarity index 100% rename from src/easyscience/Utils/Logging.py rename to src/easyscience/global_object/logger.py diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/global_object/map.py similarity index 81% rename from src/easyscience/Objects/Graph.py rename to src/easyscience/global_object/map.py index 734bf147..1f89b130 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/global_object/map.py @@ -20,7 +20,7 @@ def __init__(self, *args, my_type=None, **kwargs): self._type.append(my_type) def __repr__(self) -> str: - s = "Graph entry of type: " + s = "Map entry of type: " if self._type: s += ", ".join(self._type) else: @@ -68,21 +68,21 @@ def is_returned(self) -> bool: return "returned" in self._type -class Graph: +class Map: def __init__(self): # A dictionary of object names and their corresponding objects self._store = weakref.WeakValueDictionary() # A dict with object names as keys and a list of their object types as values, with weak references - self.__graph_dict = {} + self.__type_dict = {} # A dictionary of class names and their corresponding default name_generator iterators self._name_iterator_dict = {} def vertices(self) -> List[str]: - """returns the vertices of a graph""" + """returns the vertices of a map""" return list(self._store.keys()) def edges(self): - """returns the edges of a graph""" + """returns the edges of a map""" return self.__generate_edges() @property @@ -104,7 +104,7 @@ def returned_objs(self) -> List[str]: def _nested_get(self, obj_type: str) -> List[str]: """Access a nested object in root by key sequence.""" extracted_list = [] - for key, item in self.__graph_dict.items(): + for key, item in self.__type_dict.items(): if obj_type in item.type: extracted_list.append(key) return extracted_list @@ -120,7 +120,7 @@ def _get_name_iterator(self, class_name: str) -> int: def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): return self._store[item_id] - 
raise ValueError("Item not in graph.") + raise ValueError("Item not in map.") def is_known(self, vertex: object) -> bool: # All objects should have a 'unique_name' attribute @@ -128,48 +128,48 @@ def is_known(self, vertex: object) -> bool: def find_type(self, vertex: object) -> List[str]: if self.is_known(vertex): - return self.__graph_dict[vertex.unique_name].type + return self.__type_dict[vertex.unique_name].type def reset_type(self, obj, default_type: str): - if obj.unique_name in self.__graph_dict.keys(): - self.__graph_dict[obj.unique_name].reset_type(default_type) + if obj.unique_name in self.__type_dict.keys(): + self.__type_dict[obj.unique_name].reset_type(default_type) def change_type(self, obj, new_type: str): - if obj.unique_name in self.__graph_dict.keys(): - self.__graph_dict[obj.unique_name].type = new_type + if obj.unique_name in self.__type_dict.keys(): + self.__type_dict[obj.unique_name].type = new_type def add_vertex(self, obj: object, obj_type: str = None): name = obj.unique_name if name in self._store.keys(): raise ValueError(f"Object name {name} already exists in the graph.") self._store[name] = obj - self.__graph_dict[name] = _EntryList() # Add objects type to the list of types - self.__graph_dict[name].finalizer = weakref.finalize( + self.__type_dict[name] = _EntryList() # Add objects type to the list of types + self.__type_dict[name].finalizer = weakref.finalize( self._store[name], self.prune, name ) - self.__graph_dict[name].type = obj_type + self.__type_dict[name].type = obj_type def add_edge(self, start_obj: object, end_obj: object): - if start_obj.unique_name in self.__graph_dict.keys(): - self.__graph_dict[start_obj.unique_name].append(end_obj.unique_name) + if start_obj.unique_name in self.__type_dict.keys(): + self.__type_dict[start_obj.unique_name].append(end_obj.unique_name) else: - raise AttributeError("Start object not in graph.") + raise AttributeError("Start object not in map.") def get_edges(self, start_obj) -> List[str]: - if start_obj.unique_name in self.__graph_dict.keys(): - return list(self.__graph_dict[start_obj.unique_name]) + if start_obj.unique_name in self.__type_dict.keys(): + return list(self.__type_dict[start_obj.unique_name]) else: raise AttributeError def __generate_edges(self) -> list: """A static method generating the edges of the - graph "graph". Edges are represented as sets + map. 
Edges are represented as sets with one (a loop back to the vertex) or two vertices """ edges = [] - for vertex in self.__graph_dict: - for neighbour in self.__graph_dict[vertex]: + for vertex in self.__type_dict: + for neighbour in self.__type_dict[vertex]: if {neighbour, vertex} not in edges: edges.append({vertex, neighbour}) return edges @@ -181,19 +181,19 @@ def prune_vertex_from_edge(self, parent_obj, child_obj): vertex2 = child_obj.unique_name if ( - vertex1 in self.__graph_dict.keys() - and vertex2 in self.__graph_dict[vertex1] + vertex1 in self.__type_dict.keys() + and vertex2 in self.__type_dict[vertex1] ): - del self.__graph_dict[vertex1][self.__graph_dict[vertex1].index(vertex2)] + del self.__type_dict[vertex1][self.__type_dict[vertex1].index(vertex2)] def prune(self, key: str): - if key in self.__graph_dict.keys(): - del self.__graph_dict[key] + if key in self.__type_dict.keys(): + del self.__type_dict[key] del self._store[key] def find_isolated_vertices(self) -> list: """returns a list of isolated vertices.""" - graph = self.__graph_dict + graph = self.__type_dict isolated = [] for vertex in graph: print(isolated, vertex) @@ -203,7 +203,7 @@ def find_isolated_vertices(self) -> list: def find_path(self, start_obj, end_obj, path=[]) -> list: """find a path from start_vertex to end_vertex - in graph""" + in map""" try: start_vertex = start_obj.unique_name @@ -212,7 +212,7 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: start_vertex = start_obj end_vertex = end_obj - graph = self.__graph_dict + graph = self.__type_dict path = path + [start_vertex] if start_vertex == end_vertex: return path @@ -227,12 +227,12 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: def find_all_paths(self, start_obj, end_obj, path=[]) -> list: """find all paths from start_vertex to - end_vertex in graph""" + end_vertex in map""" start_vertex = start_obj.unique_name end_vertex = end_obj.unique_name - graph = self.__graph_dict + graph = self.__type_dict path = path + [start_vertex] if start_vertex == end_vertex: return [path] @@ -263,7 +263,7 @@ def reverse_route(self, end_obj, start_obj=None) -> List: optimum_path = [] if start_obj is None: # We now have to find where to begin..... - for possible_start, vertices in self.__graph_dict.items(): + for possible_start, vertices in self.__type_dict.items(): if end_vertex in vertices: temp_path = self.find_path(possible_start, end_vertex) if len(temp_path) < path_length: @@ -275,10 +275,10 @@ def reverse_route(self, end_obj, start_obj=None) -> List: return optimum_path def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: - """determines if the graph is connected""" + """determines if the map is connected""" if vertices_encountered is None: vertices_encountered = set() - graph = self.__graph_dict + graph = self.__type_dict vertices = list(graph.keys()) if not start_vertex: # chose a vertex from graph as a starting point @@ -295,11 +295,11 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return False def _clear(self): - """ Reset the graph to an empty state. """ + """ Reset the map to an empty state. """ self._store = weakref.WeakValueDictionary() - self.__graph_dict = {} + self.__type_dict = {} self._name_iterator_dict = {} def __repr__(self) -> str: - return f"Graph object of {len(self._store)} vertices." + return f"Map object of {len(self._store)} vertices." 
diff --git a/src/easyscience/Utils/UndoRedo.py b/src/easyscience/global_object/undo_redo.py similarity index 100% rename from src/easyscience/Utils/UndoRedo.py rename to src/easyscience/global_object/undo_redo.py diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 8149d765..8048e823 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -2,13 +2,13 @@ # SPDX-License-Identifier: BSD-3-Clause # © 2021-2023 Contributors to the EasyScience project Date: Wed, 3 Jul 2024 11:54:23 +0200 Subject: [PATCH 21/57] restructure test folder --- tests/integration_tests/__init__.py | 6 ------ .../{Objects/test_graph.py => global_object/test_map.py} | 0 .../global_object/test_undo_redo.py} | 0 3 files changed, 6 deletions(-) delete mode 100644 tests/integration_tests/__init__.py rename tests/unit_tests/{Objects/test_graph.py => global_object/test_map.py} (100%) rename tests/{integration_tests/test_undoRedo.py => unit_tests/global_object/test_undo_redo.py} (100%) diff --git a/tests/integration_tests/__init__.py b/tests/integration_tests/__init__.py deleted file mode 100644 index 22e236a6..00000000 --- a/tests/integration_tests/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# SPDX-FileCopyrightText: 2023 EasyScience contributors -# SPDX-License-Identifier: BSD-3-Clause -# © 2021-2023 Contributors to the EasyScience project Date: Wed, 3 Jul 2024 12:18:24 +0200 Subject: [PATCH 22/57] more tests --- tests/unit_tests/global_object/test_map.py | 36 ++++++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/tests/unit_tests/global_object/test_map.py b/tests/unit_tests/global_object/test_map.py index 8048e823..f7982ff8 100644 --- a/tests/unit_tests/global_object/test_map.py +++ b/tests/unit_tests/global_object/test_map.py @@ -6,6 +6,7 @@ from easyscience.Objects.Variable import Parameter from easyscience.Objects.ObjectClasses import BaseObj import pytest +import gc from easyscience import global_object class TestMap: @@ -14,7 +15,7 @@ def clear(self): global_object.map._clear() def test_clear(self, clear): - test_obj = BaseObj("test") + test_obj = BaseObj(name="test") assert len(global_object.map._store) == 1 assert len(global_object.map._Map__type_dict) == 1 assert global_object.map._name_iterator_dict == {"BaseObj": 0} @@ -24,12 +25,35 @@ def test_clear(self, clear): assert global_object.map._name_iterator_dict == {} def test_add_vertex(self, clear): - test_obj = BaseObj("test") + test_obj = BaseObj(name="test") assert len(global_object.map._store) == 1 assert len(global_object.map._Map__type_dict) == 1 assert global_object.map._name_iterator_dict == {"BaseObj": 0} - @pytest.mark.parametrize("name", ["test", "test2", "test3"]) - def test_clear_fixture(self, name, clear): - test_obj= BaseObj(name, unique_name=name) - assert len(global_object.map._store) == 1 \ No newline at end of file + def test_weakref(self, clear): + test_obj = BaseObj(name="test") + assert len(global_object.map._store) == 1 + assert len(global_object.map._Map__type_dict) == 1 + del test_obj + gc.collect() + assert len(global_object.map._store) == 0 + assert len(global_object.map._Map__type_dict) == 0 + + def test_vertices(self, clear): + test_obj = BaseObj(name="test") + test_obj2 = Parameter(value=2.0, name="test2") + assert global_object.map.vertices() == ["BaseObj_0", "Parameter_0"] + + def test_get_item_by_key(self, clear): + test_obj = BaseObj(name="test") + test_obj2 = Parameter(value=2.0, name="test2") + assert 
global_object.map.get_item_by_key(test_obj.unique_name) == test_obj + assert global_object.map.get_item_by_key(test_obj2.unique_name) == test_obj2 + + def test_get_name_iterator(self, clear): + assert global_object.map._get_name_iterator("BaseObj") == 0 + assert global_object.map._get_name_iterator("Parameter") == 0 + test_obj = BaseObj(name="test") + test_obj2 = Parameter(value=2.0, name="test2") + assert global_object.map._get_name_iterator("BaseObj") == 2 + assert global_object.map._get_name_iterator("Parameter") == 2 \ No newline at end of file From 64f0d39144065502a5fded88bed96de75045fcbf Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 4 Jul 2024 10:30:23 +0200 Subject: [PATCH 23/57] test for identical unique names --- src/easyscience/Objects/ObjectClasses.py | 3 ++- tests/unit_tests/global_object/test_map.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 10500613..22008d0d 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -238,6 +238,7 @@ class BaseObj(BasedBase): def __init__( self, name: str, + unique_name: Optional[str] = None, *args: Optional[BV], **kwargs: Optional[BV], ): @@ -248,7 +249,7 @@ def __init__( :param args: Any arguments? :param kwargs: Fields which this class should contain """ - super(BaseObj, self).__init__(name) + super(BaseObj, self).__init__(name=name, unique_name=unique_name) # If Parameter or Descriptor is given as arguments... for arg in args: if issubclass(type(arg), (BaseObj, Descriptor)): diff --git a/tests/unit_tests/global_object/test_map.py b/tests/unit_tests/global_object/test_map.py index f7982ff8..f751e7b5 100644 --- a/tests/unit_tests/global_object/test_map.py +++ b/tests/unit_tests/global_object/test_map.py @@ -56,4 +56,13 @@ def test_get_name_iterator(self, clear): test_obj = BaseObj(name="test") test_obj2 = Parameter(value=2.0, name="test2") assert global_object.map._get_name_iterator("BaseObj") == 2 - assert global_object.map._get_name_iterator("Parameter") == 2 \ No newline at end of file + assert global_object.map._get_name_iterator("Parameter") == 2 + + @pytest.mark.parametrize("cls, kwargs", [(BaseObj, {}), (Parameter, {"value": 2.0})]) + def test_identical_unique_names_exception(self, clear, cls, kwargs): + test_obj = cls(name="test", unique_name="test", **kwargs) + with pytest.raises(ValueError): + test_obj2 = cls(name="test2", unique_name="test", **kwargs) + + # test unique_name change + From f2fa102edd0b30a84e5cbb886c38f5b1cba92c07 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Wed, 12 Jun 2024 15:43:03 +0200 Subject: [PATCH 24/57] removed UUIDS from Graph in borg --- src/easyscience/Objects/Graph.py | 131 ++++++++++--------------------- 1 file changed, 42 insertions(+), 89 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 2ca59309..0b910e07 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -73,24 +73,14 @@ def is_returned(self) -> bool: return "returned" in self._type -class UniqueIdMap(WeakKeyDictionary): - def __init__(self, this_dict: dict = None): - super().__init__(self) - # replace data with a defaultdict to generate uuids - self.data = defaultdict(uuid4) - if this_dict is not None: - self.update(this_dict) - - -uniqueidmap = UniqueIdMap() - - class Graph: def __init__(self): + # A dictionary of object names and their corresponding objects self._store = 
weakref.WeakValueDictionary() + # A dict with object names as keys and a list of their object types as values, with weak references self.__graph_dict = {} - def vertices(self) -> List[int]: + def vertices(self) -> List[str]: """returns the vertices of a graph""" return list(self._store.keys()) @@ -99,63 +89,69 @@ def edges(self): return self.__generate_edges() @property - def argument_objs(self) -> List[int]: + def argument_objs(self) -> List[str]: return self._nested_get("argument") @property - def created_objs(self) -> List[int]: + def created_objs(self) -> List[str]: return self._nested_get("created") @property - def created_internal(self) -> List[int]: + def created_internal(self) -> List[str]: return self._nested_get("created_internal") @property - def returned_objs(self) -> List[int]: + def returned_objs(self) -> List[str]: return self._nested_get("returned") - def get_item_by_key(self, item_id: int) -> object: + def _nested_get(self, obj_type: str) -> List[str]: + """Access a nested object in root by key sequence.""" + extracted_list = [] + for key, item in self.__graph_dict.items(): + if obj_type in item.type: + extracted_list.append(key) + return extracted_list + + + def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): return self._store[item_id] raise ValueError def is_known(self, vertex: object) -> bool: - return self.convert_id(vertex).int in self._store.keys() + # All objects should have a 'name' attribute + return vertex.name in self._store.keys() def find_type(self, vertex: object) -> List[str]: if self.is_known(vertex): - oid = self.convert_id(vertex) - return self.__graph_dict[oid].type + return self.__graph_dict[vertex.name].type def reset_type(self, obj, default_type: str): - if self.convert_id(obj).int in self.__graph_dict.keys(): - self.__graph_dict[self.convert_id(obj).int].reset_type(default_type) + if obj.name in self.__graph_dict.keys(): + self.__graph_dict[obj.name].reset_type(default_type) def change_type(self, obj, new_type: str): - if self.convert_id(obj).int in self.__graph_dict.keys(): - self.__graph_dict[self.convert_id(obj).int].type = new_type + if obj.name in self.__graph_dict.keys(): + self.__graph_dict[obj.name].type = new_type def add_vertex(self, obj: object, obj_type: str = None): - oid = self.convert_id(obj).int - self._store[oid] = obj - self.__graph_dict[oid] = _EntryList() # Enhanced list of keys - self.__graph_dict[oid].finalizer = weakref.finalize( - self._store[oid], self.prune, oid + name = obj.name + self._store[name] = obj + self.__graph_dict[name] = _EntryList() # Add objects type to the list of types + self.__graph_dict[name].finalizer = weakref.finalize( + self._store[name], self.prune, name ) - self.__graph_dict[oid].type = obj_type + self.__graph_dict[name].type = obj_type def add_edge(self, start_obj: object, end_obj: object): - vertex1 = self.convert_id(start_obj).int - vertex2 = self.convert_id(end_obj).int - if vertex1 in self.__graph_dict.keys(): - self.__graph_dict[vertex1].append(vertex2) + if start_obj.name in self.__graph_dict.keys(): + self.__graph_dict[start_obj.name].append(end_obj.name) else: raise AttributeError def get_edges(self, start_obj) -> List[str]: - vertex1 = self.convert_id(start_obj).int - if vertex1 in self.__graph_dict.keys(): - return list(self.__graph_dict[vertex1]) + if start_obj.name in self.__graph_dict.keys(): + return list(self.__graph_dict[start_obj.name]) else: raise AttributeError @@ -173,10 +169,10 @@ def __generate_edges(self) -> list: return edges def 
prune_vertex_from_edge(self, parent_obj, child_obj): - vertex1 = self.convert_id(parent_obj).int + vertex1 = parent_obj.name if child_obj is None: return - vertex2 = self.convert_id(child_obj).int + vertex2 = child_obj.name if ( vertex1 in self.__graph_dict.keys() @@ -184,7 +180,7 @@ def prune_vertex_from_edge(self, parent_obj, child_obj): ): del self.__graph_dict[vertex1][self.__graph_dict[vertex1].index(vertex2)] - def prune(self, key: int): + def prune(self, key: str): if key in self.__graph_dict.keys(): del self.__graph_dict[key] @@ -203,8 +199,8 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: in graph""" try: - start_vertex = self.convert_id(start_obj).int - end_vertex = self.convert_id(end_obj).int + start_vertex = start_obj.name + end_vertex = end_obj.name except TypeError: start_vertex = start_obj end_vertex = end_obj @@ -226,8 +222,8 @@ def find_all_paths(self, start_obj, end_obj, path=[]) -> list: """find all paths from start_vertex to end_vertex in graph""" - start_vertex = self.convert_id(start_obj).int - end_vertex = self.convert_id(end_obj).int + start_vertex = start_obj.name + end_vertex = end_obj.name graph = self.__graph_dict path = path + [start_vertex] @@ -254,7 +250,7 @@ def reverse_route(self, end_obj, start_obj=None) -> List: :return: :rtype: """ - end_vertex = self.convert_id(end_obj).int + end_vertex = end_obj.name path_length = sys.maxsize optimum_path = [] @@ -291,49 +287,6 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return True return False - def _nested_get(self, obj_type: str) -> List[int]: - """Access a nested object in root by key sequence.""" - extracted_list = [] - for key, item in self.__graph_dict.items(): - if obj_type in item.type: - extracted_list.append(key) - return extracted_list - - @staticmethod - def convert_id(input_value) -> UUID: - """Sometimes we're dopy and""" - if not validate_id(input_value): - input_value = unique_id(input_value) - return input_value - - @staticmethod - def convert_id_to_key(input_value: Union[object, UUID]) -> int: - """Sometimes we're dopy and""" - if not validate_id(input_value): - input_value: UUID = unique_id(input_value) - return input_value.int - def __repr__(self) -> str: return f"Graph object of {len(self._store)} vertices." - -def unique_id(obj) -> UUID: - """Produce a unique integer id for the object. - - Object must me *hashable*. Id is a UUID and should be unique - across Python invocations. 
- - """ - return uniqueidmap[obj] - - -def validate_id(potential_id) -> bool: - test = True - try: - if isinstance(potential_id, UUID): - UUID(str(potential_id), version=4) - else: - UUID(potential_id, version=4) - except (ValueError, AttributeError): - test = False - return test From 287c6de9779b885b30896944b16edd74c2b4fe87 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 13:02:12 +0200 Subject: [PATCH 25/57] replace convert_id calls with .name --- src/easyscience/Objects/Graph.py | 6 - src/easyscience/Objects/Groups.py | 6 +- src/easyscience/Objects/virtual.py | 10 +- src/easyscience/Utils/Hugger/Property.py | 28 +- src/easyscience/Utils/classTools.py | 4 +- src/easyscience/Utils/io/template.py | 2 +- src/easyscience/fitting/Constraints.py | 15 +- .../fitting/minimizers/minimizer_base.py | 75 +++ tests/integration_tests/test_undoRedo.py | 4 +- tests/unit_tests/Fitting/test_fitting.py | 573 ++++++++++++++++++ tests/unit_tests/Objects/test_Groups.py | 8 +- tests/unit_tests/Objects/test_Virtual.py | 2 +- 12 files changed, 683 insertions(+), 50 deletions(-) create mode 100644 tests/unit_tests/Fitting/test_fitting.py diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 0b910e07..f559ad28 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -7,13 +7,7 @@ import sys import weakref -from collections import defaultdict from typing import List -from typing import Union -from uuid import UUID -from uuid import uuid4 -from weakref import WeakKeyDictionary - class _EntryList(list): def __init__(self, *args, my_type=None, **kwargs): diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 8d3bdfe1..07fdd9d1 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -75,8 +75,8 @@ def __init__( for key, item in kwargs.items(): _kwargs[key] = item for arg in args: - kwargs[str(borg.map.convert_id_to_key(arg))] = arg - _kwargs[str(borg.map.convert_id_to_key(arg))] = arg + kwargs[arg.name] = arg + _kwargs[arg.name] = arg # Set kwargs, also useful for serialization self._kwargs = NotarizedDict(**_kwargs) @@ -109,7 +109,7 @@ def insert(self, index: int, value: Union[V, B]) -> None: update_key = list(self._kwargs.keys()) values = list(self._kwargs.values()) # Update the internal dict - new_key = str(borg.map.convert_id_to_key(value)) + new_key = value.name update_key.insert(index, new_key) values.insert(index, value) self._kwargs.reorder(**{k: v for k, v in zip(update_key, values)}) diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index e6991e27..f92a314d 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -28,7 +28,7 @@ def raise_(ex): def _remover(a_obj_id: str, v_obj_id: str): try: # Try to get parent object (might be deleted) - a_obj = borg.map.get_item_by_key(int(a_obj_id)) + a_obj = borg.map.get_item_by_key(a_obj_id) except ValueError: return if a_obj._constraints['virtual'].get(v_obj_id, False): @@ -131,8 +131,8 @@ def virtualizer(obj: BV) -> BV: weakref.finalize( new_obj, _remover, - str(borg.map.convert_id(old_obj).int), - str(borg.map.convert_id(new_obj).int), + old_obj.name, + new_obj.name, ) return new_obj @@ -174,8 +174,8 @@ def virtualizer(obj: BV) -> BV: weakref.finalize( v_p, _remover, - str(borg.map.convert_id(obj).int), - str(borg.map.convert_id(v_p).int), + obj.name, + v_p.name, ) else: # In this case, we need to be recursive. 
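The virtual.py hunk above swaps stringified UUID integers for plain name strings in the weakref.finalize callback. In isolation, and with placeholder names (REGISTRY, Obj and this _remover are assumed stand-ins, not the easyscience API), the idea looks roughly like this:

import weakref

REGISTRY = {}     # name -> object, standing in for global_object.map
CONSTRAINTS = {}  # parent name -> {virtual object name: constraint}


class Obj:
    def __init__(self, name: str):
        self.name = name
        REGISTRY[name] = self


def _remover(parent_name: str, virtual_name: str):
    # Look the parent up by its name string; it may already have been deleted.
    parent = REGISTRY.get(parent_name)
    if parent is None:
        return
    CONSTRAINTS.get(parent_name, {}).pop(virtual_name, None)


parent = Obj("Parameter_0")
virtual = Obj("Parameter_0_virtual")
CONSTRAINTS["Parameter_0"] = {"Parameter_0_virtual": "virtual constraint"}

# Register clean-up keyed by the two name strings, mirroring
# weakref.finalize(new_obj, _remover, old_obj.name, new_obj.name) in the patch.
weakref.finalize(virtual, _remover, parent.name, virtual.name)

del REGISTRY["Parameter_0_virtual"]
del virtual  # on CPython the finalizer runs here
print(CONSTRAINTS["Parameter_0"])  # {}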
diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index ea319e91..739f820b 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -98,51 +98,51 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: returns = [returns] if log_type == "get": for var in returns: - if borg.map.convert_id_to_key(var) in borg.map.returned_objs: + if var.name in borg.map.returned_objs: index = borg.map.returned_objs.index( - borg.map.convert_id_to_key(var) + var.name ) temp += f"{Store().var_ident}{index}, " if len(returns) > 0: temp = temp[:-2] temp += " = " - if borg.map.convert_id_to_key(self._my_self) in borg.map.created_objs: + if self._my_self.name in borg.map.created_objs: # for edge in route[::-1]: index = borg.map.created_objs.index( - borg.map.convert_id_to_key(self._my_self) + self._my_self.name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) - if borg.map.convert_id(self._my_self) in borg.map.created_internal: + if self._my_self.name in borg.map.created_internal: # We now have to trace.... route = borg.map.reverse_route(self._my_self) # noqa: F841 index = borg.map.created_objs.index( - borg.map.convert_id_to_key(self._my_self) + self._my_self.name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) elif log_type == "set": - if borg.map.convert_id_to_key(self._my_self) in borg.map.created_objs: + if self._my_self.name in borg.map.created_objs: index = borg.map.created_objs.index( - borg.map.convert_id_to_key(self._my_self) + self._my_self.name ) temp += f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id} = " args = args[1:] for var in args: - if borg.map.convert_id_to_key(var) in borg.map.argument_objs: + if var.name in borg.map.argument_objs: index = borg.map.argument_objs.index( - borg.map.convert_id_to_key(var) + var.name ) temp += f"{Store().var_ident}{index}" - elif borg.map.convert_id_to_key(var) in borg.map.returned_objs: + elif var.name in borg.map.returned_objs: index = borg.map.returned_objs.index( - borg.map.convert_id_to_key(var) + var.name ) temp += f"{Store().var_ident}{index}" - elif borg.map.convert_id_to_key(var) in borg.map.created_objs: - index = borg.map.created_objs.index(borg.map.convert_id_to_key(var)) + elif var.name in borg.map.created_objs: + index = borg.map.created_objs.index(var.name) temp += f"{self._my_self.__class__.__name__.lower()}_{index}" else: if isinstance(var, str): diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 4a203718..62c98704 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -61,9 +61,9 @@ def generatePath(model_obj: B, skip_first: bool = False) -> Tuple[List[int], Lis start_idx = 0 + int(skip_first) ids = [] names = [] - model_id = borg.map.convert_id(model_obj) + model_id = model_obj.name for par in pars: - elem = borg.map.convert_id(par) + elem = par.name route = borg.map.reverse_route(elem, model_id) objs = [getattr(borg.map.get_item_by_key(r), "name") for r in route] objs.reverse() diff --git a/src/easyscience/Utils/io/template.py b/src/easyscience/Utils/io/template.py index 07ebb518..f57c3fd2 100644 --- a/src/easyscience/Utils/io/template.py +++ b/src/easyscience/Utils/io/template.py @@ -223,7 +223,7 @@ def runner(o): if hasattr(obj, '_convert_to_dict'): d = obj._convert_to_dict(d, self, skip=skip, **kwargs) if hasattr(obj, '_borg') and '@id' not in d: - d['@id'] = 
str(obj._borg.map.convert_id(obj).int) + d['@id'] = obj.name return d @staticmethod diff --git a/src/easyscience/fitting/Constraints.py b/src/easyscience/fitting/Constraints.py index cb134b90..bc58ae7c 100644 --- a/src/easyscience/fitting/Constraints.py +++ b/src/easyscience/fitting/Constraints.py @@ -43,18 +43,18 @@ def __init__( value: Optional[Number] = None, ): self.aeval = Interpreter() - self.dependent_obj_ids = self.get_key(dependent_obj) + self.dependent_obj_ids = dependent_obj.name self.independent_obj_ids = None self._enabled = True self.external = False self._finalizer = None if independent_obj is not None: if isinstance(independent_obj, list): - self.independent_obj_ids = [self.get_key(obj) for obj in independent_obj] + self.independent_obj_ids = [obj.name for obj in independent_obj] if self.dependent_obj_ids in self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') else: - self.independent_obj_ids = self.get_key(independent_obj) + self.independent_obj_ids = independent_obj.name if self.dependent_obj_ids == self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') # Test if dependent is a parameter or a descriptor. @@ -147,15 +147,6 @@ def _parse_operator(self, obj: V, *args, **kwargs) -> Number: def __repr__(self): pass - def get_key(self, obj) -> int: - """ - Get the unique key of a EasyScience object - - :param obj: EasyScience object - :return: key for EasyScience object - """ - return self._borg.map.convert_id_to_key(obj) - def get_obj(self, key: int) -> V: """ Get an EasyScience object from its unique key diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index 4c28c878..765766d8 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -173,3 +173,78 @@ def _error_from_jacobian(jacobian: np.ndarray, residuals: np.ndarray, confidence z = stats.norm.pdf(z) error_matrix = z * np.sqrt(error_matrix) return error_matrix + + +class FitResults: + """ + At the moment this is just a dummy way of unifying the returned fit parameters. 
+ """ + + __slots__ = [ + 'success', + 'fitting_engine', + 'fit_args', + 'p', + 'p0', + 'x', + 'x_matrices', + 'y_obs', + 'y_calc', + 'y_err', + 'engine_result', + 'total_results', + ] + + def __init__(self): + self.success = False + self.fitting_engine = None + self.fit_args = {} + self.p = {} + self.p0 = {} + self.x = np.ndarray([]) + self.x_matrices = np.ndarray([]) + self.y_obs = np.ndarray([]) + self.y_calc = np.ndarray([]) + self.y_err = np.ndarray([]) + self.engine_result = None + self.total_results = None + + @property + def n_pars(self): + return len(self.p) + + @property + def residual(self): + return self.y_obs - self.y_calc + + @property + def chi2(self): + return ((self.residual / self.y_err) ** 2).sum() + + @property + def reduced_chi(self): + return self.chi2 / (len(self.x) - self.n_pars) + + +class NameConverter: + def __init__(self): + from easyscience import borg + + self._borg = borg + + def get_name_from_key(self, item_key: str) -> str: + return getattr(self._borg.map.get_item_by_key(item_key), 'name', '') + + def get_item_from_key(self, item_key: str) -> object: + return self._borg.map.get_item_by_key(item_key) + + +class FitError(Exception): + def __init__(self, e: Exception = None): + self.e = e + + def __str__(self) -> str: + s = '' + if self.e is not None: + s = f'{self.e}\n' + return s + 'Something has gone wrong with the fit' diff --git a/tests/integration_tests/test_undoRedo.py b/tests/integration_tests/test_undoRedo.py index fe8f0366..267033c5 100644 --- a/tests/integration_tests/test_undoRedo.py +++ b/tests/integration_tests/test_undoRedo.py @@ -287,8 +287,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: assert borg.stack.redoText() == "Fitting routine" borg.stack.redo() - assert l2.m.raw_value == res.p[f"p{borg.map.convert_id_to_key(l2.m)}"] - assert l2.c.raw_value == res.p[f"p{borg.map.convert_id_to_key(l2.c)}"] + assert l2.m.raw_value == res.p[f"p{l2.m.name}"] + assert l2.c.raw_value == res.p[f"p{l2.c.name}"] # @pytest.mark.parametrize('math_funcs', [pytest.param([Parameter.__iadd__, float.__add__], id='Addition'), diff --git a/tests/unit_tests/Fitting/test_fitting.py b/tests/unit_tests/Fitting/test_fitting.py new file mode 100644 index 00000000..5f948613 --- /dev/null +++ b/tests/unit_tests/Fitting/test_fitting.py @@ -0,0 +1,573 @@ +# SPDX-FileCopyrightText: 2023 EasyScience contributors +# SPDX-License-Identifier: BSD-3-Clause +# © 2021-2023 Contributors to the EasyScience project 0 % This does not work as some methods don't calculate error + assert item1.error == pytest.approx(0, abs=1e-1) + assert item1.raw_value == pytest.approx(item2.raw_value, abs=5e-3) + y_calc_ref = ref_sin(x) + assert result.y_calc == pytest.approx(y_calc_ref, abs=1e-2) + assert result.residual == pytest.approx(sp_sin(x) - y_calc_ref, abs=1e-2) + + +@pytest.mark.parametrize("fit_engine", [None, "lmfit", "bumps", "DFO_LS"]) +def test_fit_result(genObjs, fit_engine): + ref_sin = genObjs[0] + sp_sin = genObjs[1] + + x = np.linspace(0, 5, 200) + y = ref_sin(x) + + sp_sin.offset.fixed = False + sp_sin.phase.fixed = False + + sp_ref1 = { + f"p{item1.name}": item1.raw_value + for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) + } + sp_ref2 = { + f"p{item1.name}": item2.raw_value + for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) + } + + f = Fitter(sp_sin, sp_sin) + + if fit_engine is not None: + try: + f.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + + result = f.fit(x, y) + 
check_fit_results(result, sp_sin, ref_sin, x, sp_ref1=sp_ref1, sp_ref2=sp_ref2) + + +@pytest.mark.parametrize("fit_method", ["leastsq", "powell", "cobyla"]) +def test_lmfit_methods(genObjs, fit_method): + ref_sin = genObjs[0] + sp_sin = genObjs[1] + + x = np.linspace(0, 5, 200) + y = ref_sin(x) + + sp_sin.offset.fixed = False + sp_sin.phase.fixed = False + + f = Fitter(sp_sin, sp_sin) + assert fit_method in f.available_methods() + result = f.fit(x, y, method=fit_method) + check_fit_results(result, sp_sin, ref_sin, x) + + +@pytest.mark.xfail(reason="known bumps issue") +@pytest.mark.parametrize("fit_method", ["newton", "lm"]) +def test_bumps_methods(genObjs, fit_method): + ref_sin = genObjs[0] + sp_sin = genObjs[1] + + x = np.linspace(0, 5, 200) + y = ref_sin(x) + + sp_sin.offset.fixed = False + sp_sin.phase.fixed = False + + f = Fitter(sp_sin, sp_sin) + f.switch_engine("bumps") + assert fit_method in f.available_methods() + result = f.fit(x, y, method=fit_method) + check_fit_results(result, sp_sin, ref_sin, x) + + +@pytest.mark.parametrize("fit_engine", ["lmfit", "bumps", "DFO_LS"]) +def test_fit_constraints(genObjs2, fit_engine): + ref_sin = genObjs2[0] + sp_sin = genObjs2[1] + + x = np.linspace(0, 5, 200) + y = ref_sin(x) + + sp_sin.phase.fixed = False + + f = Fitter(sp_sin, sp_sin) + + assert len(f.fit_constraints()) == 0 + c = ObjConstraint(sp_sin.offset, "2*", sp_sin.phase) + f.add_fit_constraint(c) + + if fit_engine is not None: + try: + f.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + + result = f.fit(x, y) + check_fit_results(result, sp_sin, ref_sin, x) + assert len(f.fit_constraints()) == 1 + f.remove_fit_constraint(0) + assert len(f.fit_constraints()) == 0 + + +# def test_fit_makeModel(genObjs): +# ref_sin = genObjs[0] +# sp_sin = genObjs[1] +# +# x = np.linspace(0, 5, 200) +# y = ref_sin(x) +# +# sp_sin.offset.fixed = False +# sp_sin.phase.fixed = False +# +# f = Fitter(sp_sin, sp_sin) +# model = f.make_model() +# result = f.fit(x, y, model=model) +# check_fit_results(result, sp_sin, ref_sin, x) + + +@pytest.mark.parametrize("with_errors", [False, True]) +@pytest.mark.parametrize("fit_engine", [None, "lmfit", "bumps", "DFO_LS"]) +def test_multi_fit(genObjs, genObjs2, fit_engine, with_errors): + ref_sin1 = genObjs[0] + ref_sin2 = genObjs2[0] + + ref_sin1.offset.user_constraints["ref_sin2"] = ObjConstraint( + ref_sin2.offset, "", ref_sin1.offset + ) + ref_sin1.offset.user_constraints["ref_sin2"]() + + sp_sin1 = genObjs[1] + sp_sin2 = genObjs2[1] + + sp_sin1.offset.user_constraints["sp_sin2"] = ObjConstraint( + sp_sin2.offset, "", sp_sin1.offset + ) + sp_sin1.offset.user_constraints["sp_sin2"]() + + x1 = np.linspace(0, 5, 200) + y1 = ref_sin1(x1) + x2 = np.copy(x1) + y2 = ref_sin2(x2) + + sp_sin1.offset.fixed = False + sp_sin1.phase.fixed = False + sp_sin2.phase.fixed = False + + f = MultiFitter([sp_sin1, sp_sin2], [sp_sin1, sp_sin2]) + if fit_engine is not None: + try: + f.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + + args = [[x1, x2], [y1, y2]] + kwargs = {} + if with_errors: + kwargs["weights"] = [1 / np.sqrt(y1), 1 / np.sqrt(y2)] + + results = f.fit(*args, **kwargs) + X = [x1, x2] + Y = [y1, y2] + F_ref = [ref_sin1, ref_sin2] + F_real = [sp_sin1, sp_sin2] + for idx, result in enumerate(results): + assert result.n_pars == len(sp_sin1.get_fit_parameters()) + len( + sp_sin2.get_fit_parameters() + ) + assert result.chi2 == pytest.approx( + 0, abs=1.5e-3 * 
(len(result.x) - result.n_pars) + ) + assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.success + assert np.all(result.x == X[idx]) + assert np.all(result.y_obs == Y[idx]) + assert result.y_calc == pytest.approx(F_ref[idx](X[idx]), abs=1e-2) + assert result.residual == pytest.approx( + F_real[idx](X[idx]) - F_ref[idx](X[idx]), abs=1e-2 + ) + + +@pytest.mark.parametrize("with_errors", [False, True]) +@pytest.mark.parametrize("fit_engine", [None, "lmfit", "bumps", "DFO_LS"]) +def test_multi_fit2(genObjs, genObjs2, fit_engine, with_errors): + ref_sin1 = genObjs[0] + ref_sin2 = genObjs2[0] + ref_line = Line.from_pars(1, 4.6) + + ref_sin1.offset.user_constraints["ref_sin2"] = ObjConstraint( + ref_sin2.offset, "", ref_sin1.offset + ) + ref_sin1.offset.user_constraints["ref_line"] = ObjConstraint( + ref_line.m, "", ref_sin1.offset + ) + ref_sin1.offset.user_constraints["ref_sin2"]() + ref_sin1.offset.user_constraints["ref_line"]() + + sp_sin1 = genObjs[1] + sp_sin2 = genObjs2[1] + sp_line = Line.from_pars(0.43, 6.1) + + sp_sin1.offset.user_constraints["sp_sin2"] = ObjConstraint( + sp_sin2.offset, "", sp_sin1.offset + ) + sp_sin1.offset.user_constraints["sp_line"] = ObjConstraint( + sp_line.m, "", sp_sin1.offset + ) + sp_sin1.offset.user_constraints["sp_sin2"]() + sp_sin1.offset.user_constraints["sp_line"]() + + x1 = np.linspace(0, 5, 200) + y1 = ref_sin1(x1) + x3 = np.copy(x1) + y3 = ref_sin2(x3) + x2 = np.copy(x1) + y2 = ref_line(x2) + + sp_sin1.offset.fixed = False + sp_sin1.phase.fixed = False + sp_sin2.phase.fixed = False + sp_line.c.fixed = False + + f = MultiFitter([sp_sin1, sp_line, sp_sin2], [sp_sin1, sp_line, sp_sin2]) + if fit_engine is not None: + try: + f.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + + args = [[x1, x2, x3], [y1, y2, y3]] + kwargs = {} + if with_errors: + kwargs["weights"] = [1 / np.sqrt(y1), 1 / np.sqrt(y2), 1 / np.sqrt(y3)] + + results = f.fit(*args, **kwargs) + X = [x1, x2, x3] + Y = [y1, y2, y3] + F_ref = [ref_sin1, ref_line, ref_sin2] + F_real = [sp_sin1, sp_line, sp_sin2] + + assert len(results) == len(X) + + for idx, result in enumerate(results): + assert result.n_pars == len(sp_sin1.get_fit_parameters()) + len( + sp_sin2.get_fit_parameters() + ) + len(sp_line.get_fit_parameters()) + assert result.chi2 == pytest.approx( + 0, abs=1.5e-3 * (len(result.x) - result.n_pars) + ) + assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.success + assert np.all(result.x == X[idx]) + assert np.all(result.y_obs == Y[idx]) + assert result.y_calc == pytest.approx(F_real[idx](X[idx]), abs=1e-2) + assert result.residual == pytest.approx( + F_ref[idx](X[idx]) - F_real[idx](X[idx]), abs=1e-2 + ) + + +class AbsSin2D(BaseObj): + def __init__(self, offset: Parameter, phase: Parameter): + super(AbsSin2D, self).__init__("sin2D", offset=offset, phase=phase) + + @classmethod + def from_pars(cls, offset, phase): + offset = Parameter("offset", offset) + phase = Parameter("phase", phase) + return cls(offset=offset, phase=phase) + + def __call__(self, x): + X = x[:, :, 0] + Y = x[:, :, 1] + return np.abs( + np.sin(self.phase.raw_value * X + self.offset.raw_value) + ) * np.abs(np.sin(self.phase.raw_value * Y + self.offset.raw_value)) + + +class AbsSin2DL(AbsSin2D): + def __call__(self, x): + X = x[:, 0] + Y = x[:, 1] + return np.abs( + np.sin(self.phase.raw_value * X + self.offset.raw_value) + ) * np.abs(np.sin(self.phase.raw_value * Y + self.offset.raw_value)) + + 
+@pytest.mark.parametrize("with_errors", [False, True]) +@pytest.mark.parametrize("fit_engine", [None, "lmfit", "bumps", "DFO_LS"]) +def test_2D_vectorized(fit_engine, with_errors): + x = np.linspace(0, 5, 200) + mm = AbsSin2D.from_pars(0.3, 1.6) + m2 = AbsSin2D.from_pars( + 0.1, 1.8 + ) # The fit is quite sensitive to the initial values :-( + X, Y = np.meshgrid(x, x) + XY = np.stack((X, Y), axis=2) + ff = Fitter(m2, m2) + if fit_engine is not None: + try: + ff.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + try: + args = [XY, mm(XY)] + kwargs = {"vectorized": True} + if with_errors: + kwargs["weights"] = 1 / np.sqrt(args[1]) + result = ff.fit(*args, **kwargs) + except FitError as e: + if "Unable to allocate" in str(e): + pytest.skip(msg="MemoryError - Matrix too large") + else: + raise e + assert result.n_pars == len(m2.get_fit_parameters()) + assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.success + assert np.all(result.x == XY) + y_calc_ref = m2(XY) + assert result.y_calc == pytest.approx(y_calc_ref, abs=1e-2) + assert result.residual == pytest.approx(mm(XY) - y_calc_ref, abs=1e-2) + + +@pytest.mark.parametrize("with_errors", [False, True]) +@pytest.mark.parametrize("fit_engine", [None, "lmfit", "bumps", "DFO_LS"]) +def test_2D_non_vectorized(fit_engine, with_errors): + x = np.linspace(0, 5, 200) + mm = AbsSin2DL.from_pars(0.3, 1.6) + m2 = AbsSin2DL.from_pars( + 0.1, 1.8 + ) # The fit is quite sensitive to the initial values :-( + X, Y = np.meshgrid(x, x) + XY = np.stack((X, Y), axis=2) + ff = Fitter(m2, m2) + if fit_engine is not None: + try: + ff.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + try: + args = [XY, mm(XY.reshape(-1, 2))] + kwargs = {"vectorized": False} + if with_errors: + kwargs["weights"] = 1 / np.sqrt(args[1]) + result = ff.fit(*args, **kwargs) + except FitError as e: + if "Unable to allocate" in str(e): + pytest.skip(msg="MemoryError - Matrix too large") + else: + raise e + assert result.n_pars == len(m2.get_fit_parameters()) + assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.success + assert np.all(result.x == XY) + y_calc_ref = m2(XY.reshape(-1, 2)) + assert result.y_calc == pytest.approx(y_calc_ref, abs=1e-2) + assert result.residual == pytest.approx( + mm(XY.reshape(-1, 2)) - y_calc_ref, abs=1e-2 + ) + + +@pytest.mark.parametrize("with_errors", [False, True]) +@pytest.mark.parametrize("fit_engine", [None, "lmfit", "bumps", "DFO_LS"]) +def test_multi_fit_1D_2D(genObjs, fit_engine, with_errors): + # Generate fit and reference objects + ref_sin1D = genObjs[0] + sp_sin1D = genObjs[1] + + ref_sin2D = AbsSin2D.from_pars(0.3, 1.6) + sp_sin2D = AbsSin2D.from_pars( + 0.1, 1.75 + ) # The fit is VERY sensitive to the initial values :-( + + # Link the parameters + ref_sin1D.offset.user_constraints["ref_sin2"] = ObjConstraint( + ref_sin2D.offset, "", ref_sin1D.offset + ) + ref_sin1D.offset.user_constraints["ref_sin2"]() + + sp_sin1D.offset.user_constraints["sp_sin2"] = ObjConstraint( + sp_sin2D.offset, "", sp_sin1D.offset + ) + sp_sin1D.offset.user_constraints["sp_sin2"]() + + # Generate data + x1D = np.linspace(0.2, 3.8, 400) + y1D = ref_sin1D(x1D) + + x = np.linspace(0, 5, 200) + X, Y = np.meshgrid(x, x) + x2D = np.stack((X, Y), axis=2) + y2D = ref_sin2D(x2D) + + ff = MultiFitter([sp_sin1D, sp_sin2D], [sp_sin1D, sp_sin2D]) + if fit_engine is not None: + try: + ff.switch_engine(fit_engine) + except 
AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + + sp_sin1D.offset.fixed = False + sp_sin1D.phase.fixed = False + sp_sin2D.phase.fixed = False + + f = MultiFitter([sp_sin1D, sp_sin2D], [sp_sin1D, sp_sin2D]) + if fit_engine is not None: + try: + f.switch_engine(fit_engine) + except AttributeError: + pytest.skip(msg=f"{fit_engine} is not installed") + try: + args = [[x1D, x2D], [y1D, y2D]] + kwargs = {"vectorized": True} + if with_errors: + kwargs["weights"] = [1 / np.sqrt(y1D), 1 / np.sqrt(y2D)] + results = f.fit(*args, **kwargs) + except FitError as e: + if "Unable to allocate" in str(e): + pytest.skip(msg="MemoryError - Matrix too large") + else: + raise e + + X = [x1D, x2D] + Y = [y1D, y2D] + F_ref = [ref_sin1D, ref_sin2D] + F_real = [sp_sin1D, sp_sin2D] + for idx, result in enumerate(results): + assert result.n_pars == len(sp_sin1D.get_fit_parameters()) + len( + sp_sin2D.get_fit_parameters() + ) + assert result.chi2 == pytest.approx( + 0, abs=1.5e-3 * (len(result.x) - result.n_pars) + ) + assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.success + assert np.all(result.x == X[idx]) + assert np.all(result.y_obs == Y[idx]) + assert result.y_calc == pytest.approx(F_ref[idx](X[idx]), abs=1e-2) + assert result.residual == pytest.approx( + F_real[idx](X[idx]) - F_ref[idx](X[idx]), abs=1e-2 + ) diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index 5e483bee..8cadbc09 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -486,8 +486,8 @@ def test_baseCollection_set_index(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert obj._borg.map.convert_id_to_key(item) in edges - assert obj._borg.map.convert_id_to_key(p2) not in edges + assert item.name in edges + assert p2.name not in edges @pytest.mark.parametrize("cls", class_constructors) @@ -510,8 +510,8 @@ def test_baseCollection_set_index_based(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert obj._borg.map.convert_id_to_key(item) in edges - assert obj._borg.map.convert_id_to_key(p4) not in edges + assert item.name in edges + assert p4.name not in edges @pytest.mark.parametrize("cls", class_constructors) diff --git a/tests/unit_tests/Objects/test_Virtual.py b/tests/unit_tests/Objects/test_Virtual.py index 16fb9fc0..46f71b0e 100644 --- a/tests/unit_tests/Objects/test_Virtual.py +++ b/tests/unit_tests/Objects/test_Virtual.py @@ -52,7 +52,7 @@ def test_virtual_variable_modify(cls): obj.value = new_value assert obj.raw_value == v_obj.raw_value - id_vobj = str(cls._borg.map.convert_id(v_obj).int) + id_vobj = v_obj.name assert id_vobj in list(obj._constraints["virtual"].keys()) del v_obj From 34d609d440853b3af2f151687dae1d2a20d4a66d Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 14:12:23 +0200 Subject: [PATCH 26/57] NameConverter class removed, move name assignment --- src/easyscience/Objects/Graph.py | 1 + src/easyscience/Objects/ObjectClasses.py | 2 +- src/easyscience/Objects/Variable.py | 2 +- src/easyscience/fitting/minimizers/minimizer_base.py | 12 ------------ .../fitting/minimizers/minimizer_bumps.py | 2 +- src/easyscience/fitting/minimizers/minimizer_dfo.py | 2 +- .../fitting/minimizers/minimizer_lmfit.py | 2 +- 7 files changed, 6 insertions(+), 17 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index f559ad28..cfe1647f 100644 --- 
a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -130,6 +130,7 @@ def change_type(self, obj, new_type: str): def add_vertex(self, obj: object, obj_type: str = None): name = obj.name + self._store[name] = obj self.__graph_dict[name] = _EntryList() # Add objects type to the list of types self.__graph_dict[name].finalizer = weakref.finalize( diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 7ed8d400..35aa8727 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -39,11 +39,11 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} def __init__(self, name: str, interface: Optional[iF] = None): + self._name: str = name self._borg = borg self._borg.map.add_vertex(self, obj_type='created') self.interface = interface self.user_data: dict = {} - self._name: str = name @property def _arg_spec(self) -> Set[str]: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 69d14176..b730fca9 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -108,13 +108,13 @@ def __init__( if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} + self.name: str = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') # Make the connection between self and parent if parent is not None: self._borg.map.add_edge(parent, self) - self.name: str = name # Attach units if necessary if isinstance(units, ureg.Unit): self._units = ureg.Quantity(1, units=deepcopy(units)) diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index 765766d8..815696bb 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -226,18 +226,6 @@ def reduced_chi(self): return self.chi2 / (len(self.x) - self.n_pars) -class NameConverter: - def __init__(self): - from easyscience import borg - - self._borg = borg - - def get_name_from_key(self, item_key: str) -> str: - return getattr(self._borg.map.get_item_by_key(item_key), 'name', '') - - def get_item_from_key(self, item_key: str) -> object: - return self._borg.map.get_item_by_key(item_key) - class FitError(Exception): def __init__(self, e: Exception = None): diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 23abb626..01076ba0 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -81,7 +81,7 @@ def _generate_fit_function(self) -> Callable: # Get a list of `Parameters` self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = NameConverter().get_key(parameter) + key = parameter.name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index a034830a..e857c75b 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -97,7 +97,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = NameConverter().get_key(parameter) + key = parameter.name self._cached_pars[key] = parameter 
self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 35ba2c9b..fd7ff411 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -77,7 +77,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = NameConverter().get_key(parameter) + key = parameter.name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) From b7617f37051c56b364faa56a7be5bde1b6d0bf91 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 14:17:20 +0200 Subject: [PATCH 27/57] remove get_key --- tests/unit_tests/Objects/test_BaseObj.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 0fbca274..9a600355 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -428,20 +428,17 @@ def from_pars(cls, a: float): a = A.from_pars(a_start) graph = a._borg.map - def get_key(obj): - return graph.convert_id_to_key(obj) - assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 a_ = Parameter("a", a_end) - assert get_key(a.a) in graph.get_edges(a) + assert a.a.name in graph.get_edges(a) a__ = a.a setattr(a, "a", a_) assert a.a.raw_value == a_end assert len(graph.get_edges(a)) == 1 - assert get_key(a_) in graph.get_edges(a) - assert get_key(a__) not in graph.get_edges(a) + assert a_.name in graph.get_edges(a) + assert a__.name not in graph.get_edges(a) def test_BaseCreation(): From 7575d1946fc88b3d203b138e4f6e96606de03b3e Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 14:35:23 +0200 Subject: [PATCH 28/57] Add check if object name is already taken --- src/easyscience/Objects/Graph.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index cfe1647f..273e7ac0 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -130,7 +130,8 @@ def change_type(self, obj, new_type: str): def add_vertex(self, obj: object, obj_type: str = None): name = obj.name - + if name in self._store.keys(): + raise ValueError(f"Object name {name} already exists in the graph.") self._store[name] = obj self.__graph_dict[name] = _EntryList() # Add objects type to the list of types self.__graph_dict[name].finalizer = weakref.finalize( From c3e917da4cd811766804a27bbf003de00cbaabe8 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 13 Jun 2024 15:51:41 +0200 Subject: [PATCH 29/57] add default name generation to BasedBase and Descriptor --- src/easyscience/Objects/ObjectClasses.py | 16 ++++++++++++++-- src/easyscience/Objects/Variable.py | 17 ++++++++++++++--- tests/unit_tests/Objects/test_Groups.py | 2 +- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 35aa8727..17ba66d3 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -38,8 +38,10 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} - def __init__(self, name: str, interface: Optional[iF] = None): - self._name: str = name + def __init__(self, name: Union(str, None) = None, 
interface: Optional[iF] = None): + if name is None: + name = self._generate_default_name() + self._name = name self._borg = borg self._borg.map.add_vertex(self, obj_type='created') self.interface = interface @@ -194,6 +196,16 @@ def __dir__(self) -> Iterable[str]: """ new_class_objs = list(k for k in dir(self.__class__) if not k.startswith('_')) return sorted(new_class_objs) + + def _generate_default_name(self) -> str: + """ + Generate a default name for the object. + """ + class_name = self.__class__.__name__ + iterator = 0 + while class_name+"_"+str(iterator) in self._borg.map.vertices(): + iterator += 1 + return class_name+"_"+str(iterator) if TYPE_CHECKING: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index b730fca9..cfd2d7e3 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -66,8 +66,8 @@ class Descriptor(ComponentSerializer): def __init__( self, - name: str, value: Any, + name: Union(str, None) = None, units: Optional[Union[str, ureg.Unit]] = None, description: Optional[str] = None, url: Optional[str] = None, @@ -107,8 +107,9 @@ def __init__( """ if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} - - self.name: str = name + if name is None: + name = self._generate_default_name() + self._name = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') # Make the connection between self and parent @@ -384,6 +385,16 @@ def to_obj_type(self, data_type: Type[Parameter], *kwargs): def __copy__(self): return self.__class__.from_dict(self.as_dict()) + def _generate_default_name(self) -> str: + """ + Generate a default name for the object. + """ + class_name = self.__class__.__name__ + iterator = 0 + while class_name+"_"+str(iterator) in self._borg.map.vertices(): + iterator += 1 + return class_name+"_"+str(iterator) + V = TypeVar('V', bound=Descriptor) diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index 8cadbc09..c19a947c 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -494,7 +494,7 @@ def test_baseCollection_set_index(cls): def test_baseCollection_set_index_based(cls): name = "test" p1 = Parameter("p1", 1) - p2 = Parameter("p1", 2) + p2 = Parameter("p2", 2) p3 = Parameter("p3", 3) p4 = Parameter("p4", 4) p5 = Parameter("p5", 5) From 82c75e7bddf5c20a1678a9842d98f33ad07dd5ef Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 09:48:37 +0200 Subject: [PATCH 30/57] borg _clear method --- src/easyscience/Objects/Graph.py | 7 ++++++- src/easyscience/Objects/ObjectClasses.py | 5 +++-- src/easyscience/Objects/Variable.py | 8 ++++---- src/easyscience/fitting/Constraints.py | 4 ++-- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 273e7ac0..742f6782 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -143,7 +143,7 @@ def add_edge(self, start_obj: object, end_obj: object): if start_obj.name in self.__graph_dict.keys(): self.__graph_dict[start_obj.name].append(end_obj.name) else: - raise AttributeError + raise AttributeError("Start object not in graph.") def get_edges(self, start_obj) -> List[str]: if start_obj.name in self.__graph_dict.keys(): @@ -179,6 +179,7 @@ def prune_vertex_from_edge(self, parent_obj, child_obj): def prune(self, key: str): if key in self.__graph_dict.keys(): del self.__graph_dict[key] 
+ del self._store[key] def find_isolated_vertices(self) -> list: """returns a list of isolated vertices.""" @@ -283,6 +284,10 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return True return False + def _clear(self): + self._store = weakref.WeakValueDictionary() + self.__graph_dict = {} + def __repr__(self) -> str: return f"Graph object of {len(self._store)} vertices." diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 17ba66d3..75c26558 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -38,7 +38,8 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} - def __init__(self, name: Union(str, None) = None, interface: Optional[iF] = None): + def __init__(self, name: str, interface: Optional[iF] = None): + self._borg = borg if name is None: name = self._generate_default_name() self._name = name @@ -245,7 +246,7 @@ def __init__( self._kwargs = kwargs for key in kwargs.keys(): if key in known_keys: - raise AttributeError + raise AttributeError("Kwargs cannot overwrite class attributes in BaseObj.") if issubclass(type(kwargs[key]), (BasedBase, Descriptor, DescriptorBase)) or 'BaseCollection' in [ c.__name__ for c in type(kwargs[key]).__bases__ ]: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index cfd2d7e3..4b02aff2 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -109,7 +109,7 @@ def __init__( self._args = {'value': None, 'units': ''} if name is None: name = self._generate_default_name() - self._name = name + self.name = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') # Make the connection between self and parent @@ -122,7 +122,7 @@ def __init__( elif isinstance(units, (str, type(None))): self._units = ureg.parse_expression(units) else: - raise AttributeError + raise AttributeError('Units must be a string or a pint unit object') # Clunky method of keeping self.value up to date self._type = type(value) self.__isBooleanValue = isinstance(value, bool) @@ -486,8 +486,8 @@ class Parameter(Descriptor): def __init__( self, - name: str, value: Union[numbers.Number, np.ndarray], + name: str, error: Optional[Union[numbers.Number, np.ndarray]] = 0.0, min: Optional[numbers.Number] = -np.Inf, max: Optional[numbers.Number] = np.Inf, @@ -530,7 +530,7 @@ def __init__( if error < 0: raise ValueError('Standard deviation `error` must be positive') - super().__init__(name, value, **kwargs) + super().__init__(name=name, value=value, **kwargs) self._args['units'] = str(self.unit) # Warnings if we are given a boolean diff --git a/src/easyscience/fitting/Constraints.py b/src/easyscience/fitting/Constraints.py index bc58ae7c..0a633077 100644 --- a/src/easyscience/fitting/Constraints.py +++ b/src/easyscience/fitting/Constraints.py @@ -111,11 +111,11 @@ def __call__(self, *args, no_set: bool = False, **kwargs): return None return independent_objs = None - if isinstance(self.dependent_obj_ids, int): + if isinstance(self.dependent_obj_ids, str): dependent_obj = self.get_obj(self.dependent_obj_ids) else: raise AttributeError - if isinstance(self.independent_obj_ids, int): + if isinstance(self.independent_obj_ids, str): independent_objs = self.get_obj(self.independent_obj_ids) elif isinstance(self.independent_obj_ids, list): independent_objs = [self.get_obj(obj_id) for obj_id in self.independent_obj_ids] From 
2edab459574382862a4700252034b33a880f7fe9 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 10:41:33 +0200 Subject: [PATCH 31/57] change id from name to unique_name --- src/easyscience/Objects/Graph.py | 38 ++++++++++++------------ src/easyscience/Objects/ObjectClasses.py | 17 +++++++++-- src/easyscience/Objects/Variable.py | 26 ++++++++++++++-- 3 files changed, 56 insertions(+), 25 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 742f6782..50dbe8ff 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -113,23 +113,23 @@ def get_item_by_key(self, item_id: str) -> object: raise ValueError def is_known(self, vertex: object) -> bool: - # All objects should have a 'name' attribute - return vertex.name in self._store.keys() + # All objects should have a 'unique_name' attribute + return vertex.unique_name in self._store.keys() def find_type(self, vertex: object) -> List[str]: if self.is_known(vertex): - return self.__graph_dict[vertex.name].type + return self.__graph_dict[vertex.unique_name].type def reset_type(self, obj, default_type: str): - if obj.name in self.__graph_dict.keys(): - self.__graph_dict[obj.name].reset_type(default_type) + if obj.unique_name in self.__graph_dict.keys(): + self.__graph_dict[obj.unique_name].reset_type(default_type) def change_type(self, obj, new_type: str): - if obj.name in self.__graph_dict.keys(): - self.__graph_dict[obj.name].type = new_type + if obj.unique_name in self.__graph_dict.keys(): + self.__graph_dict[obj.unique_name].type = new_type def add_vertex(self, obj: object, obj_type: str = None): - name = obj.name + name = obj.unique_name if name in self._store.keys(): raise ValueError(f"Object name {name} already exists in the graph.") self._store[name] = obj @@ -140,14 +140,14 @@ def add_vertex(self, obj: object, obj_type: str = None): self.__graph_dict[name].type = obj_type def add_edge(self, start_obj: object, end_obj: object): - if start_obj.name in self.__graph_dict.keys(): - self.__graph_dict[start_obj.name].append(end_obj.name) + if start_obj.unique_name in self.__graph_dict.keys(): + self.__graph_dict[start_obj.unique_name].append(end_obj.unique_name) else: raise AttributeError("Start object not in graph.") def get_edges(self, start_obj) -> List[str]: - if start_obj.name in self.__graph_dict.keys(): - return list(self.__graph_dict[start_obj.name]) + if start_obj.unique_name in self.__graph_dict.keys(): + return list(self.__graph_dict[start_obj.unique_name]) else: raise AttributeError @@ -165,10 +165,10 @@ def __generate_edges(self) -> list: return edges def prune_vertex_from_edge(self, parent_obj, child_obj): - vertex1 = parent_obj.name + vertex1 = parent_obj.unique_name if child_obj is None: return - vertex2 = child_obj.name + vertex2 = child_obj.unique_name if ( vertex1 in self.__graph_dict.keys() @@ -196,8 +196,8 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: in graph""" try: - start_vertex = start_obj.name - end_vertex = end_obj.name + start_vertex = start_obj.unique_name + end_vertex = end_obj.unique_name except TypeError: start_vertex = start_obj end_vertex = end_obj @@ -219,8 +219,8 @@ def find_all_paths(self, start_obj, end_obj, path=[]) -> list: """find all paths from start_vertex to end_vertex in graph""" - start_vertex = start_obj.name - end_vertex = end_obj.name + start_vertex = start_obj.unique_name + end_vertex = end_obj.unique_name graph = self.__graph_dict path = path + [start_vertex] @@ -247,7 +247,7 @@ def 
reverse_route(self, end_obj, start_obj=None) -> List: :return: :rtype: """ - end_vertex = end_obj.name + end_vertex = end_obj.unique_name path_length = sys.maxsize optimum_path = [] diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 75c26558..46731c04 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -38,10 +38,11 @@ class BasedBase(ComponentSerializer): _REDIRECT = {} - def __init__(self, name: str, interface: Optional[iF] = None): + def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): self._borg = borg - if name is None: - name = self._generate_default_name() + if unique_name is None: + unique_name = self._generate_default_name() + self._unique_name = unique_name self._name = name self._borg = borg self._borg.map.add_vertex(self, obj_type='created') @@ -67,6 +68,16 @@ def __reduce__(self): cls = getattr(self, '__old_class__', self.__class__) return cls.from_dict, (state,) + @property + def unique_name(self) -> str: + """ Get the unique name of the object.""" + return self._unique_name + + @unique_name.setter + def unique_name(self, new_unique_name: str): + """ Set a new unique name for the object. The old name is still kept in the map. """ + self._unique_name = new_unique_name + @property def name(self) -> str: """ diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 4b02aff2..c1497fa4 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -67,8 +67,9 @@ class Descriptor(ComponentSerializer): def __init__( self, value: Any, - name: Union(str, None) = None, + name: str, units: Optional[Union[str, ureg.Unit]] = None, + unique_name: Optional[str] = None, description: Optional[str] = None, url: Optional[str] = None, display_name: Optional[str] = None, @@ -107,8 +108,9 @@ def __init__( """ if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} - if name is None: - name = self._generate_default_name() + if unique_name is None: + unique_name = self._generate_default_name() + self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated self._borg.map.add_vertex(self, obj_type='created') @@ -180,6 +182,24 @@ def __reduce__(self): cls = self.__old_class__ return cls.from_dict, (state,) + @property + def unique_name(self) -> str: + """ + Get the unique name of this object. + + :return: Unique name of this object + """ + return self._unique_name + + @unique_name.setter + def unique_name(self, name: str): + """ + Set the unique name of this object. 
+ + :param name: Unique name of this object + """ + self._unique_name = name + @property def display_name(self) -> str: """ From 4c18dedbddad34957d24e832f33c6afcf6441307 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 10:46:26 +0200 Subject: [PATCH 32/57] more .name changes --- src/easyscience/fitting/minimizers/minimizer_bumps.py | 2 +- src/easyscience/fitting/minimizers/minimizer_dfo.py | 2 +- src/easyscience/fitting/minimizers/minimizer_lmfit.py | 2 +- tests/unit_tests/Objects/test_BaseObj.py | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 01076ba0..316a5881 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -81,7 +81,7 @@ def _generate_fit_function(self) -> Callable: # Get a list of `Parameters` self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = parameter.name + key = parameter.unique_name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index e857c75b..7d497034 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -97,7 +97,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = parameter.name + key = parameter.unique_name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index fd7ff411..cfcae942 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -77,7 +77,7 @@ def _generate_fit_function(self) -> Callable: self._cached_pars = {} self._cached_pars_vals = {} for parameter in self._object.get_fit_parameters(): - key = parameter.name + key = parameter.unique_name self._cached_pars[key] = parameter self._cached_pars_vals[key] = (parameter.value, parameter.error) diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 9a600355..78dda2fc 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -431,14 +431,14 @@ def from_pars(cls, a: float): assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 a_ = Parameter("a", a_end) - assert a.a.name in graph.get_edges(a) + assert a.a.unique_name in graph.get_edges(a) a__ = a.a setattr(a, "a", a_) assert a.a.raw_value == a_end assert len(graph.get_edges(a)) == 1 - assert a_.name in graph.get_edges(a) - assert a__.name not in graph.get_edges(a) + assert a_.unique_name in graph.get_edges(a) + assert a__.unique_name not in graph.get_edges(a) def test_BaseCreation(): From e170e88180ad3673a6728bd21a7993ad80d7d5ea Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 10:51:49 +0200 Subject: [PATCH 33/57] reorder name and value --- src/easyscience/Objects/Variable.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index c1497fa4..07374094 100644 --- 
a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -66,8 +66,8 @@ class Descriptor(ComponentSerializer): def __init__( self, - value: Any, name: str, + value: Any, units: Optional[Union[str, ureg.Unit]] = None, unique_name: Optional[str] = None, description: Optional[str] = None, @@ -506,8 +506,8 @@ class Parameter(Descriptor): def __init__( self, - value: Union[numbers.Number, np.ndarray], name: str, + value: Union[numbers.Number, np.ndarray], error: Optional[Union[numbers.Number, np.ndarray]] = 0.0, min: Optional[numbers.Number] = -np.Inf, max: Optional[numbers.Number] = np.Inf, @@ -541,7 +541,7 @@ def __init__( # Set the error self._args = {'value': value, 'units': '', 'error': error} - if not isinstance(value, numbers.Number): + if not isinstance(value, numbers.Number) or isinstance(value, np.ndarray): raise ValueError('In a parameter the `value` must be numeric') if value < min: raise ValueError('`value` can not be less than `min`') From d56a111a2c15b6dee23ab3d7a232abe94da9e131 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 11:50:52 +0200 Subject: [PATCH 34/57] more .name replacements and test fixes --- src/easyscience/Objects/Groups.py | 6 ++--- src/easyscience/Objects/virtual.py | 18 +++++++------ src/easyscience/Utils/Hugger/Property.py | 28 ++++++++++---------- src/easyscience/Utils/classTools.py | 4 +-- src/easyscience/Utils/io/template.py | 2 +- src/easyscience/fitting/Constraints.py | 6 ++--- tests/integration_tests/test_undoRedo.py | 4 +-- tests/unit_tests/Fitting/test_fitting.py | 4 +-- tests/unit_tests/Objects/test_Groups.py | 8 +++--- tests/unit_tests/Objects/test_Virtual.py | 2 +- tests/unit_tests/utils/io_tests/test_dict.py | 7 ++++- tests/unit_tests/utils/io_tests/test_json.py | 3 +++ tests/unit_tests/utils/io_tests/test_xml.py | 3 ++- 13 files changed, 53 insertions(+), 42 deletions(-) diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 07fdd9d1..27ae1144 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -75,8 +75,8 @@ def __init__( for key, item in kwargs.items(): _kwargs[key] = item for arg in args: - kwargs[arg.name] = arg - _kwargs[arg.name] = arg + kwargs[arg.unique_name] = arg + _kwargs[arg.unique_name] = arg # Set kwargs, also useful for serialization self._kwargs = NotarizedDict(**_kwargs) @@ -109,7 +109,7 @@ def insert(self, index: int, value: Union[V, B]) -> None: update_key = list(self._kwargs.keys()) values = list(self._kwargs.values()) # Update the internal dict - new_key = value.name + new_key = value.unique_name update_key.insert(index, new_key) values.insert(index, value) self._kwargs.reorder(**{k: v for k, v in zip(update_key, values)}) diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index f92a314d..f8ac6c49 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -48,6 +48,7 @@ def realizer(obj: BV): args = [] if klass in ec_var.__dict__.values(): # is_variable check kwargs = obj.encode_data() + kwargs["unique_name"] = None return klass(**kwargs) else: kwargs = {name: realizer(item) for name, item in obj._kwargs.items()} @@ -93,7 +94,7 @@ def component_realizer(obj: BV, component: str, recursive: bool = True): value = component._kwargs[key] else: value = key - key = value._borg.map.convert_id_to_key(value) + key = value.unique_name if getattr(value, '__old_class__', value.__class__) in ec_var.__dict__.values(): continue 
component._borg.map.prune_vertex_from_edge(component, component._kwargs[key]) @@ -125,14 +126,14 @@ def virtualizer(obj: BV) -> BV: old_obj = obj._borg.map.get_item_by_key(obj._derived_from) constraint = ObjConstraint(new_obj, '', old_obj) constraint.external = True - old_obj._constraints['virtual'][str(obj._borg.map.convert_id(new_obj).int)] = constraint + old_obj._constraints['virtual'][str(obj.unique_name)] = constraint new_obj._constraints['builtin'] = dict() # setattr(new_obj, "__previous_set", getattr(olobj, "__previous_set", None)) weakref.finalize( new_obj, _remover, - old_obj.name, - new_obj.name, + old_obj.unique_name, + new_obj.unique_name, ) return new_obj @@ -141,7 +142,7 @@ def virtualizer(obj: BV) -> BV: virtual_options = { '_is_virtual': True, 'is_virtual': property(fget=lambda self: self._is_virtual), - '_derived_from': property(fget=lambda self: self._borg.map.convert_id(obj).int), + '_derived_from': property(fget=obj.unique_name), '__non_virtual_class__': klass, 'realize': realizer, 'relalize_component': component_realizer, @@ -164,18 +165,19 @@ def virtualizer(obj: BV) -> BV: d = obj.encode_data() if hasattr(d, 'fixed'): d['fixed'] = True + d['unique_name'] = None v_p = cls(**d) v_p._enabled = False constraint = ObjConstraint(v_p, '', obj) constraint.external = True - obj._constraints['virtual'][str(cls._borg.map.convert_id(v_p).int)] = constraint + obj._constraints['virtual'][v_p.unique_name] = constraint v_p._constraints['builtin'] = dict() setattr(v_p, '__previous_set', getattr(obj, '__previous_set', None)) weakref.finalize( v_p, _remover, - obj.name, - v_p.name, + obj.unique_name, + v_p.unique_name, ) else: # In this case, we need to be recursive. diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index 739f820b..efd6f76a 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -98,51 +98,51 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: returns = [returns] if log_type == "get": for var in returns: - if var.name in borg.map.returned_objs: + if var.unique_name in borg.map.returned_objs: index = borg.map.returned_objs.index( - var.name + var.unique_name ) temp += f"{Store().var_ident}{index}, " if len(returns) > 0: temp = temp[:-2] temp += " = " - if self._my_self.name in borg.map.created_objs: + if self._my_self.unique_name in borg.map.created_objs: # for edge in route[::-1]: index = borg.map.created_objs.index( - self._my_self.name + self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) - if self._my_self.name in borg.map.created_internal: + if self._my_self.unique_name in borg.map.created_internal: # We now have to trace.... 
route = borg.map.reverse_route(self._my_self) # noqa: F841 index = borg.map.created_objs.index( - self._my_self.name + self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) elif log_type == "set": - if self._my_self.name in borg.map.created_objs: + if self._my_self.unique_name in borg.map.created_objs: index = borg.map.created_objs.index( - self._my_self.name + self._my_self.unique_name ) temp += f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id} = " args = args[1:] for var in args: - if var.name in borg.map.argument_objs: + if var.unique_name in borg.map.argument_objs: index = borg.map.argument_objs.index( - var.name + var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.name in borg.map.returned_objs: + elif var.unique_name in borg.map.returned_objs: index = borg.map.returned_objs.index( - var.name + var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.name in borg.map.created_objs: - index = borg.map.created_objs.index(var.name) + elif var.unique_name in borg.map.created_objs: + index = borg.map.created_objs.index(var.unique_name) temp += f"{self._my_self.__class__.__name__.lower()}_{index}" else: if isinstance(var, str): diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 62c98704..03412a8f 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -61,9 +61,9 @@ def generatePath(model_obj: B, skip_first: bool = False) -> Tuple[List[int], Lis start_idx = 0 + int(skip_first) ids = [] names = [] - model_id = model_obj.name + model_id = model_obj.unique_name for par in pars: - elem = par.name + elem = par.unique_name route = borg.map.reverse_route(elem, model_id) objs = [getattr(borg.map.get_item_by_key(r), "name") for r in route] objs.reverse() diff --git a/src/easyscience/Utils/io/template.py b/src/easyscience/Utils/io/template.py index f57c3fd2..9f37e905 100644 --- a/src/easyscience/Utils/io/template.py +++ b/src/easyscience/Utils/io/template.py @@ -223,7 +223,7 @@ def runner(o): if hasattr(obj, '_convert_to_dict'): d = obj._convert_to_dict(d, self, skip=skip, **kwargs) if hasattr(obj, '_borg') and '@id' not in d: - d['@id'] = obj.name + d['@id'] = obj.unique_name return d @staticmethod diff --git a/src/easyscience/fitting/Constraints.py b/src/easyscience/fitting/Constraints.py index 0a633077..3995f9eb 100644 --- a/src/easyscience/fitting/Constraints.py +++ b/src/easyscience/fitting/Constraints.py @@ -43,18 +43,18 @@ def __init__( value: Optional[Number] = None, ): self.aeval = Interpreter() - self.dependent_obj_ids = dependent_obj.name + self.dependent_obj_ids = dependent_obj.unique_name self.independent_obj_ids = None self._enabled = True self.external = False self._finalizer = None if independent_obj is not None: if isinstance(independent_obj, list): - self.independent_obj_ids = [obj.name for obj in independent_obj] + self.independent_obj_ids = [obj.unique_name for obj in independent_obj] if self.dependent_obj_ids in self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') else: - self.independent_obj_ids = independent_obj.name + self.independent_obj_ids = independent_obj.unique_name if self.dependent_obj_ids == self.independent_obj_ids: raise AttributeError('A dependent object can not be an independent object') # Test if dependent is a parameter or a descriptor. 
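A short usage sketch of the new identifier as seen by constraints (illustrative only; it assumes Parameter and ObjConstraint are importable from the modules touched above, and the parameter values are arbitrary):

    from easyscience.Objects.Variable import Parameter
    from easyscience.fitting.Constraints import ObjConstraint

    a = Parameter("a", 1.0)
    b = Parameter("b", 2.0)

    # Constraints now store unique_name strings rather than integer map keys.
    c = ObjConstraint(b, "2*", a)
    assert c.dependent_obj_ids == b.unique_name
    assert c.independent_obj_ids == a.unique_name
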
diff --git a/tests/integration_tests/test_undoRedo.py b/tests/integration_tests/test_undoRedo.py index 267033c5..6ed4ccd1 100644 --- a/tests/integration_tests/test_undoRedo.py +++ b/tests/integration_tests/test_undoRedo.py @@ -287,8 +287,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: assert borg.stack.redoText() == "Fitting routine" borg.stack.redo() - assert l2.m.raw_value == res.p[f"p{l2.m.name}"] - assert l2.c.raw_value == res.p[f"p{l2.c.name}"] + assert l2.m.raw_value == res.p[f"p{l2.m.unique_name}"] + assert l2.c.raw_value == res.p[f"p{l2.c.unique_name}"] # @pytest.mark.parametrize('math_funcs', [pytest.param([Parameter.__iadd__, float.__add__], id='Addition'), diff --git a/tests/unit_tests/Fitting/test_fitting.py b/tests/unit_tests/Fitting/test_fitting.py index 5f948613..450c3d1e 100644 --- a/tests/unit_tests/Fitting/test_fitting.py +++ b/tests/unit_tests/Fitting/test_fitting.py @@ -149,11 +149,11 @@ def test_fit_result(genObjs, fit_engine): sp_sin.phase.fixed = False sp_ref1 = { - f"p{item1.name}": item1.raw_value + f"p{item1.unique_name}": item1.raw_value for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) } sp_ref2 = { - f"p{item1.name}": item2.raw_value + f"p{item1.unique_name}": item2.raw_value for item1, item2 in zip(sp_sin._kwargs.values(), ref_sin._kwargs.values()) } diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index c19a947c..e175f4ae 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -486,8 +486,8 @@ def test_baseCollection_set_index(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert item.name in edges - assert p2.name not in edges + assert item.unique_name in edges + assert p2.unique_name not in edges @pytest.mark.parametrize("cls", class_constructors) @@ -510,8 +510,8 @@ def test_baseCollection_set_index_based(cls): edges = obj._borg.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: - assert item.name in edges - assert p4.name not in edges + assert item.unique_name in edges + assert p4.unique_name not in edges @pytest.mark.parametrize("cls", class_constructors) diff --git a/tests/unit_tests/Objects/test_Virtual.py b/tests/unit_tests/Objects/test_Virtual.py index 46f71b0e..7c610887 100644 --- a/tests/unit_tests/Objects/test_Virtual.py +++ b/tests/unit_tests/Objects/test_Virtual.py @@ -52,7 +52,7 @@ def test_virtual_variable_modify(cls): obj.value = new_value assert obj.raw_value == v_obj.raw_value - id_vobj = v_obj.name + id_vobj = v_obj.unique_name assert id_vobj in list(obj._constraints["virtual"].keys()) del v_obj diff --git a/tests/unit_tests/utils/io_tests/test_dict.py b/tests/unit_tests/utils/io_tests/test_dict.py index bca42a54..5f4b6b2a 100644 --- a/tests/unit_tests/utils/io_tests/test_dict.py +++ b/tests/unit_tests/utils/io_tests/test_dict.py @@ -18,6 +18,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict +from easyscience import borg def recursive_remove(d, remove_keys: list) -> dict: @@ -215,7 +216,7 @@ def __init__(self, a, b): except metadata.PackageNotFoundError: version = '0.0.0' - obj = B(Descriptor("a", 1.0), np.array([1.0, 2.0, 3.0])) + obj = B(Descriptor("a", 1.0, unique_name="a"), np.array([1.0, 2.0, 3.0])) full_enc = obj.encode(encoder=DictSerializer, full_encode=True) expected = { "@module": "tests.unit_tests.utils.io_tests.test_dict", @@ -237,6 +238,7 @@ def __init__(self, a, b): "name": "a", 
"enabled": True, "value": 1.0, + "unique_name": "a", "url": "", }, } @@ -247,6 +249,7 @@ def test_custom_class_full_decode_with_numpy(): obj = B(Descriptor("a", 1.0), np.array([1.0, 2.0, 3.0])) full_enc = obj.encode(encoder=DictSerializer, full_encode=True) + borg.map._clear() obj2 = B.decode(full_enc, decoder=DictSerializer) assert obj.name == obj2.name assert obj.a.raw_value == obj2.a.raw_value @@ -267,6 +270,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) + borg.map._clear() dec = dp_cls.decode(enc, decoder=DictSerializer) for k in data_dict.keys(): @@ -287,6 +291,7 @@ def test_variable_DictSerializer_from_dict(dp_kwargs: dict, dp_cls: Type[Descrip data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) + borg.map._clear() dec = dp_cls.from_dict(enc) for k in data_dict.keys(): diff --git a/tests/unit_tests/utils/io_tests/test_json.py b/tests/unit_tests/utils/io_tests/test_json.py index 92d8f3cf..e48787b7 100644 --- a/tests/unit_tests/utils/io_tests/test_json.py +++ b/tests/unit_tests/utils/io_tests/test_json.py @@ -15,6 +15,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict +from easyscience import borg def recursive_remove(d, remove_keys: list) -> dict: @@ -178,6 +179,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonSerializer) + borg.map._clear() assert isinstance(enc, str) dec = obj.decode(enc, decoder=JsonSerializer) @@ -199,5 +201,6 @@ def test_variable_DataDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descri data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonDataSerializer) + borg.map._clear() with pytest.raises(NotImplementedError): dec = obj.decode(enc, decoder=JsonDataSerializer) diff --git a/tests/unit_tests/utils/io_tests/test_xml.py b/tests/unit_tests/utils/io_tests/test_xml.py index ef8ac422..b1d35040 100644 --- a/tests/unit_tests/utils/io_tests/test_xml.py +++ b/tests/unit_tests/utils/io_tests/test_xml.py @@ -14,7 +14,7 @@ from .test_core import Descriptor from .test_core import dp_param_dict from .test_core import skip_dict - +from easyscience import borg def recursive_remove(d, remove_keys: list) -> dict: """ @@ -115,6 +115,7 @@ def test_variable_XMLDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descrip assert isinstance(enc, str) data_xml = ET.XML(enc) assert data_xml.tag == "data" + borg.map._clear() dec = dp_cls.decode(enc, decoder=XMLSerializer) for k in data_dict.keys(): From b4766a2cdeeba940521f3a25206ee02172a920e3 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:37:36 +0200 Subject: [PATCH 35/57] Fix tests --- src/easyscience/Objects/Graph.py | 14 ++++++++++++-- src/easyscience/Objects/ObjectClasses.py | 11 +---------- src/easyscience/Objects/Variable.py | 12 +----------- src/easyscience/Objects/virtual.py | 12 ++++++------ src/easyscience/Utils/Hugger/Property.py | 2 +- .../fitting/minimizers/minimizer_bumps.py | 6 +++--- .../fitting/minimizers/minimizer_dfo.py | 2 +- .../fitting/minimizers/minimizer_lmfit.py | 2 +- tests/unit_tests/Objects/test_BaseObj.py | 3 +++ tests/unit_tests/Objects/test_Groups.py | 14 +++++++++++--- 10 files changed, 40 insertions(+), 38 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 50dbe8ff..de5dcd92 
100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -73,6 +73,8 @@ def __init__(self): self._store = weakref.WeakValueDictionary() # A dict with object names as keys and a list of their object types as values, with weak references self.__graph_dict = {} + # A dictionary of class names and their corresponding default name_generator iterators + self._name_iterator_dict = {} def vertices(self) -> List[str]: """returns the vertices of a graph""" @@ -106,11 +108,19 @@ def _nested_get(self, obj_type: str) -> List[str]: extracted_list.append(key) return extracted_list + def _get_name_iterator(self, class_name: str) -> int: + """Get the iterator for the name generator for a class""" + if class_name in self._name_iterator_dict.keys(): + self._name_iterator_dict[class_name] += 1 + return self._name_iterator_dict[class_name] - 1 + else: + self._name_iterator_dict[class_name] = 1 + return 0 def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): return self._store[item_id] - raise ValueError + raise ValueError("Item not in graph.") def is_known(self, vertex: object) -> bool: # All objects should have a 'unique_name' attribute @@ -198,7 +208,7 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: try: start_vertex = start_obj.unique_name end_vertex = end_obj.unique_name - except TypeError: + except AttributeError: start_vertex = start_obj end_vertex = end_obj diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 46731c04..9332349c 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -41,7 +41,7 @@ class BasedBase(ComponentSerializer): def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): self._borg = borg if unique_name is None: - unique_name = self._generate_default_name() + unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) self._unique_name = unique_name self._name = name self._borg = borg @@ -209,15 +209,6 @@ def __dir__(self) -> Iterable[str]: new_class_objs = list(k for k in dir(self.__class__) if not k.startswith('_')) return sorted(new_class_objs) - def _generate_default_name(self) -> str: - """ - Generate a default name for the object. - """ - class_name = self.__class__.__name__ - iterator = 0 - while class_name+"_"+str(iterator) in self._borg.map.vertices(): - iterator += 1 - return class_name+"_"+str(iterator) if TYPE_CHECKING: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 07374094..30d0b603 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -109,7 +109,7 @@ def __init__( if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} if unique_name is None: - unique_name = self._generate_default_name() + unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated @@ -405,16 +405,6 @@ def to_obj_type(self, data_type: Type[Parameter], *kwargs): def __copy__(self): return self.__class__.from_dict(self.as_dict()) - def _generate_default_name(self) -> str: - """ - Generate a default name for the object. 
- """ - class_name = self.__class__.__name__ - iterator = 0 - while class_name+"_"+str(iterator) in self._borg.map.vertices(): - iterator += 1 - return class_name+"_"+str(iterator) - V = TypeVar('V', bound=Descriptor) diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index f8ac6c49..b77bf2e5 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -140,12 +140,12 @@ def virtualizer(obj: BV) -> BV: # The supplied class klass = getattr(obj, '__old_class__', obj.__class__) virtual_options = { - '_is_virtual': True, - 'is_virtual': property(fget=lambda self: self._is_virtual), - '_derived_from': property(fget=obj.unique_name), - '__non_virtual_class__': klass, - 'realize': realizer, - 'relalize_component': component_realizer, + "_is_virtual": True, + "is_virtual": property(fget=lambda self: self._is_virtual), + "_derived_from": property(fget=lambda self: obj.unique_name), + "__non_virtual_class__": klass, + "realize": realizer, + "relalize_component": component_realizer, } import easyscience.Objects.Variable as ec_var diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index efd6f76a..03864e51 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -117,7 +117,7 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: if self._my_self.unique_name in borg.map.created_internal: # We now have to trace.... route = borg.map.reverse_route(self._my_self) # noqa: F841 - index = borg.map.created_objs.index( + index = borg.map.created_internal.index( self._my_self.unique_name ) temp += ( diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 316a5881..7e7502e8 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -98,7 +98,7 @@ def fit_function(x: np.ndarray, **kwargs): """ # Update the `Parameter` values and the callback if needed for name, value in kwargs.items(): - par_name = int(name[1:]) + par_name = name[1:] if par_name in self._cached_pars.keys(): ## TODO clean when full move to new_variable @@ -288,7 +288,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status: bool): borg.stack.beginMacro('Fitting routine') for index, name in enumerate(self._cached_model._pnames): - dict_name = int(name[1:]) + dict_name = name[1:] pars[dict_name].value = fit_result.x[index] pars[dict_name].error = fit_result.dx[index] if stack_status: @@ -311,7 +311,7 @@ def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: pars = self._cached_pars item = {} for index, name in enumerate(self._cached_model._pnames): - dict_name = int(name[1:]) + dict_name = name[1:] ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import Parameter diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index 7d497034..47c4d8a8 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -115,7 +115,7 @@ def fit_function(x: np.ndarray, **kwargs): # Update the `Parameter` values and the callback if needed # TODO THIS IS NOT THREAD SAFE :-( for name, value in kwargs.items(): - par_name = int(name[1:]) + par_name = name[1:] if par_name in self._cached_pars.keys(): ## TODO clean when full move to new_variable from easyscience.Objects.new_variable import 
Parameter diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index cfcae942..6867c0fe 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -95,7 +95,7 @@ def fit_function(x: np.ndarray, **kwargs): # Update the `Parameter` values and the callback if needed # TODO THIS IS NOT THREAD SAFE :-( for name, value in kwargs.items(): - par_name = int(name[1:]) + par_name = name[1:] if par_name in self._cached_pars.keys(): # This will take into account constraints diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 78dda2fc..197dbcae 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -20,6 +20,7 @@ from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter from easyscience.Utils.io.dict import DictSerializer +from easyscience import borg @pytest.fixture @@ -194,6 +195,7 @@ def check_dict(check, item): if isinstance(check, dict) and isinstance(item, dict): if "@module" in item.keys(): with not_raises([ValueError, AttributeError]): + borg.map._clear() this_obj = DictSerializer().decode(item) for key in check.keys(): @@ -222,6 +224,7 @@ def test_baseobj_dir(setup_pars): "get_fit_parameters", "get_parameters", "interface", + "unique_name", "name", "par1", "par2", diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index e175f4ae..c4e14a7e 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -14,6 +14,7 @@ from easyscience.Objects.ObjectClasses import BaseObj from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter +from easyscience import borg test_dict = { "@module": "easyscience.Objects.Groups", @@ -28,6 +29,7 @@ "name": "par1", "value": 1, "units": "dimensionless", + "unique_name": "BaseCollection_0", "description": "", "url": "", "display_name": "par1", @@ -307,6 +309,7 @@ def test_baseCollection_dir(cls): "constraints", "get_fit_parameters", "append", + "unique_name", "index", "as_dict", "clear", @@ -346,6 +349,10 @@ def check_dict(dict_1: dict, dict_2: dict): del keys_1[keys_1.index("@id")] if "@id" in keys_2: del keys_2[keys_2.index("@id")] + if "unique_name" in keys_1: + del keys_1[keys_1.index("unique_name")] + if "unique_name" in keys_2: + del keys_2[keys_2.index("unique_name")] assert not set(keys_1).difference(set(keys_2)) @@ -442,6 +449,7 @@ def test_baseCollection_iterator_dict(cls): obj = cls(name, *l_object) d = obj.as_dict() + borg.map._clear() obj2 = cls.from_dict(d) for index, item in enumerate(obj2): @@ -549,11 +557,11 @@ def test_basecollectionGraph(cls): name = "test" v = [1, 2] p = [Parameter(f"p{i}", v[i]) for i in range(len(v))] - p_id = [G.convert_id_to_key(_p) for _p in p] + p_id = [_p.unique_name for _p in p] bb = cls(name, *p) - bb_id = G.convert_id_to_key(bb) + bb_id = bb.unique_name b = Beta("b", bb=bb) - b_id = G.convert_id_to_key(b) + b_id = b.unique_name for _id in p_id: assert _id in G.get_edges(bb) assert len(p) == len(G.get_edges(bb)) From 77131e6cba10c6f9415fb97cd6c89cd2ba84bbb2 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:41:33 +0200 Subject: [PATCH 36/57] Ruff --- src/easyscience/Objects/Graph.py | 1 + src/easyscience/Objects/Groups.py | 1 - src/easyscience/Objects/ObjectClasses.py | 1 - 3 
files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index de5dcd92..90977aec 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -9,6 +9,7 @@ import weakref from typing import List + class _EntryList(list): def __init__(self, *args, my_type=None, **kwargs): super(_EntryList, self).__init__(*args, **kwargs) diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 27ae1144..1aecd177 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -17,7 +17,6 @@ from typing import Tuple from typing import Union -from easyscience import borg from easyscience.Objects.ObjectClasses import BasedBase from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Utils.UndoRedo import NotarizedDict diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 9332349c..1759eb1e 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -16,7 +16,6 @@ from typing import Optional from typing import Set from typing import TypeVar -from typing import Union from easyscience import borg from easyscience.Utils.classTools import addLoggedProp From c24f40a83a7c8e7c894c125565ea55fbbeb565fd Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 18 Jun 2024 15:51:01 +0200 Subject: [PATCH 37/57] tox fix --- src/easyscience/Objects/Variable.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 30d0b603..0d4424fb 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -499,8 +499,8 @@ def __init__( name: str, value: Union[numbers.Number, np.ndarray], error: Optional[Union[numbers.Number, np.ndarray]] = 0.0, - min: Optional[numbers.Number] = -np.Inf, - max: Optional[numbers.Number] = np.Inf, + min: Optional[numbers.Number] = -np.inf, + max: Optional[numbers.Number] = np.inf, fixed: Optional[bool] = False, **kwargs, ): From 1b5230d137a15f48c1a877e1f3fc5737aa0dc7e6 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Mon, 1 Jul 2024 14:09:55 +0200 Subject: [PATCH 38/57] Update map on unique_name change + code cleanup --- src/easyscience/Objects/Graph.py | 9 ++++---- src/easyscience/Objects/ObjectClasses.py | 17 ++++++++++++++-- src/easyscience/Objects/Variable.py | 26 ++++++++++++++++-------- 3 files changed, 37 insertions(+), 15 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 90977aec..3a5291e1 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -111,12 +111,11 @@ def _nested_get(self, obj_type: str) -> List[str]: def _get_name_iterator(self, class_name: str) -> int: """Get the iterator for the name generator for a class""" - if class_name in self._name_iterator_dict.keys(): - self._name_iterator_dict[class_name] += 1 - return self._name_iterator_dict[class_name] - 1 + if class_name not in self._name_iterator_dict.keys(): + self._name_iterator_dict[class_name] = 0 else: - self._name_iterator_dict[class_name] = 1 - return 0 + self._name_iterator_dict[class_name] += 1 + return self._name_iterator_dict[class_name] def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 1759eb1e..88460b30 100644 --- 
a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -40,7 +40,7 @@ class BasedBase(ComponentSerializer): def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): self._borg = borg if unique_name is None: - unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) + unique_name = self._unique_name_generator() self._unique_name = unique_name self._name = name self._borg = borg @@ -74,8 +74,13 @@ def unique_name(self) -> str: @unique_name.setter def unique_name(self, new_unique_name: str): - """ Set a new unique name for the object. The old name is still kept in the map. """ + """ Set a new unique name for the object. The old name is still kept in the map. + + :param new_unique_name: New unique name for the object""" + if not isinstance(new_unique_name, str): + raise TypeError("Unique name has to be a string.") self._unique_name = new_unique_name + self._borg.map.add_vertex(self) @property def name(self) -> str: @@ -199,6 +204,14 @@ def get_fit_parameters(self) -> Union[List[Parameter], List[new_Parameter]]: fit_list.append(item) return fit_list + def _unique_name_generator(self) -> str: + """ + Generate a generic unique name for the object using the class name and a global iterator. + """ + class_name = self.__class__.__name__ + iterator_string = str(self._borg.map._get_name_iterator(class_name)) + return class_name + "_" + iterator_string + def __dir__(self) -> Iterable[str]: """ This creates auto-completion and helps out in iPython notebooks. diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 0d4424fb..74b37813 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -109,7 +109,7 @@ def __init__( if not hasattr(self, '_args'): self._args = {'value': None, 'units': ''} if unique_name is None: - unique_name = self.__class__.__name__ + "_" + str(self._borg.map._get_name_iterator(self.__class__.__name__)) + unique_name = self._unique_name_generator() self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated @@ -192,13 +192,14 @@ def unique_name(self) -> str: return self._unique_name @unique_name.setter - def unique_name(self, name: str): - """ - Set the unique name of this object. - - :param name: Unique name of this object - """ - self._unique_name = name + def unique_name(self, new_unique_name: str): + """ Set a new unique name for the object. The old name is still kept in the map. + + :param new_unique_name: New unique name for the object""" + if not isinstance(new_unique_name, str): + raise TypeError("Unique name has to be a string.") + self._unique_name = new_unique_name + self._borg.map.add_vertex(self) @property def display_name(self) -> str: @@ -362,6 +363,15 @@ def convert_unit(self, unit_str: str): self._args['value'] = self.raw_value self._args['units'] = str(self.unit) + def _unique_name_generator(self) -> str: + """ + Generate a generic unique name for the object using the class name and a global iterator. 
+ """ + class_name = self.__class__.__name__ + iterator_string = str(self._borg.map._get_name_iterator(class_name)) + return class_name + "_" + iterator_string + + # @cached_property @property def compatible_units(self) -> List[str]: From d2e7edfd0e3ba4700076096d9de2783d358e3c70 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Mon, 1 Jul 2024 16:26:05 +0200 Subject: [PATCH 39/57] Clear graph unittest --- src/easyscience/Objects/Graph.py | 2 ++ tests/unit_tests/Objects/test_graph.py | 25 ++++++++++++++++++++++--- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/Objects/Graph.py index 3a5291e1..734bf147 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/Objects/Graph.py @@ -295,8 +295,10 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return False def _clear(self): + """ Reset the graph to an empty state. """ self._store = weakref.WeakValueDictionary() self.__graph_dict = {} + self._name_iterator_dict = {} def __repr__(self) -> str: return f"Graph object of {len(self._store)} vertices." diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 2dbe6915..207b7a59 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -1,7 +1,26 @@ -__author__ = "github.com/wardsimon" -__version__ = "0.1.0" - # SPDX-FileCopyrightText: 2023 EasyScience contributors # SPDX-License-Identifier: BSD-3-Clause # © 2021-2023 Contributors to the EasyScience project Date: Tue, 2 Jul 2024 10:09:52 +0200 Subject: [PATCH 40/57] test_add_vertex --- tests/unit_tests/Objects/test_graph.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 207b7a59..92f8e9cd 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -13,8 +13,7 @@ class TestGraph: def clear(self): borg.map._clear() - def test_clear(self): - borg.map._clear() + def test_clear(self, clear): test_obj = BaseObj("test") assert len(borg.map._store) == 1 assert len(borg.map._Graph__graph_dict) == 1 @@ -24,3 +23,13 @@ def test_clear(self): assert borg.map._Graph__graph_dict == {} assert borg.map._name_iterator_dict == {} + def test_add_vertex(self, clear): + test_obj = BaseObj("test") + assert len(borg.map._store) == 1 + assert len(borg.map._Graph__graph_dict) == 1 + assert borg.map._name_iterator_dict == {"BaseObj": 0} + + @pytest.mark.parametrize("name", ["test", "test2", "test3"]) + def test_clear_fixture(self, name, clear): + test_obj= BaseObj(name, unique_name=name) + assert len(borg.map._store) == 1 \ No newline at end of file From f0a94cfb0fca5f8e64adab5753d0fbd1d8b11bc6 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 2 Jul 2024 10:20:15 +0200 Subject: [PATCH 41/57] Rename borg to global_object --- examples_old/example4.py | 8 +- examples_old/example5_broken.py | 10 +- examples_old/example6_broken.py | 8 +- src/easyscience/Fitting/Fitting.py | 473 ++++++++++++++++++ src/easyscience/Objects/Groups.py | 16 +- src/easyscience/Objects/ObjectClasses.py | 33 +- src/easyscience/Objects/Variable.py | 46 +- .../Objects/{Borg.py => global_object.py} | 4 +- src/easyscience/Objects/virtual.py | 18 +- src/easyscience/Utils/Hugger/Property.py | 60 +-- src/easyscience/Utils/UndoRedo.py | 12 +- src/easyscience/Utils/classTools.py | 6 +- src/easyscience/Utils/decorators.py | 4 +- 
src/easyscience/Utils/io/template.py | 2 +- src/easyscience/__init__.py | 8 +- src/easyscience/fitting/Constraints.py | 12 +- .../fitting/minimizers/minimizer_bumps.py | 16 +- .../fitting/minimizers/minimizer_dfo.py | 16 +- .../fitting/minimizers/minimizer_lmfit.py | 16 +- tests/integration_tests/test_undoRedo.py | 98 ++-- tests/unit_tests/Objects/test_BaseObj.py | 10 +- .../Objects/test_Descriptor_Parameter.py | 4 +- tests/unit_tests/Objects/test_Groups.py | 12 +- tests/unit_tests/Objects/test_graph.py | 26 +- tests/unit_tests/utils/io_tests/test_dict.py | 8 +- tests/unit_tests/utils/io_tests/test_json.py | 6 +- tests/unit_tests/utils/io_tests/test_xml.py | 4 +- 27 files changed, 706 insertions(+), 230 deletions(-) create mode 100644 src/easyscience/Fitting/Fitting.py rename src/easyscience/Objects/{Borg.py => global_object.py} (91%) diff --git a/examples_old/example4.py b/examples_old/example4.py index 7b6e6c98..90bc59be 100644 --- a/examples_old/example4.py +++ b/examples_old/example4.py @@ -9,7 +9,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience.Fitting import Fitter from easyscience.Objects.core import ComponentSerializer from easyscience.Objects.ObjectClasses import BaseObj @@ -84,7 +84,7 @@ class InterfaceTemplate(ComponentSerializer, metaclass=ABCMeta): """ _interfaces = [] - _borg = borg + _global_object = global_object def __init_subclass__(cls, is_abstract: bool = False, **kwargs): """ @@ -171,7 +171,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface1: Value of {value_label} set to {value}") setattr(self.calculator, value_label, value) @@ -224,7 +224,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface2: Value of {value_label} set to {value}") self._data = json.loads(self.calculator.export_data()) if value_label in self._data.keys(): diff --git a/examples_old/example5_broken.py b/examples_old/example5_broken.py index c88451d3..6a1e5413 100644 --- a/examples_old/example5_broken.py +++ b/examples_old/example5_broken.py @@ -8,7 +8,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience.Fitting import Fitter from easyscience.Objects.Base import BaseObj from easyscience.Objects.Base import Parameter @@ -86,7 +86,7 @@ class InterfaceTemplate(ComponentSerializer, metaclass=ABCMeta): """ _interfaces = [] - _borg = borg + _global_object = global_object def __init_subclass__(cls, is_abstract: bool = False, **kwargs): """ @@ -173,7 +173,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface1: Value of {value_label} set to {value}") setattr(self.calculator, value_label, value) @@ -226,7 +226,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface2: Value of {value_label} set to {value}") self._data = json.loads(self.calculator.export_data()) if value_label in self._data.keys(): @@ -338,7 +338,7 @@ def __repr__(self): return f"Line: m={self.m}, c={self.c}" -borg.debug = True +global_object.debug = True interface = InterfaceFactory() line = Line(interface_factory=interface) diff --git a/examples_old/example6_broken.py 
b/examples_old/example6_broken.py index 88c13a65..4fce34e7 100644 --- a/examples_old/example6_broken.py +++ b/examples_old/example6_broken.py @@ -8,7 +8,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience.Fitting import Fitter from easyscience.Objects.Base import BaseObj from easyscience.Objects.Base import Parameter @@ -92,7 +92,7 @@ class InterfaceTemplate(ComponentSerializer, metaclass=ABCMeta): """ _interfaces = [] - _borg = borg + _global_object = global_object def __init_subclass__(cls, is_abstract: bool = False, **kwargs): """ @@ -183,7 +183,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface1: Value of {value_label} set to {value}") setattr(self.calculator, value_label, value) @@ -264,7 +264,7 @@ def set_value(self, value_label: str, value: float): :return: None :rtype: noneType """ - if self._borg.debug: + if self._global_object.debug: print(f"Interface2: Value of {value_label} set to {value}") self._data = json.loads(self.calculator.export_data()) if value_label in self._data.keys(): diff --git a/src/easyscience/Fitting/Fitting.py b/src/easyscience/Fitting/Fitting.py new file mode 100644 index 00000000..3ffaad77 --- /dev/null +++ b/src/easyscience/Fitting/Fitting.py @@ -0,0 +1,473 @@ +from __future__ import annotations + +__author__ = 'github.com/wardsimon' +__version__ = '0.0.1' + +import functools + +# SPDX-FileCopyrightText: 2023 EasyScience contributors +# SPDX-License-Identifier: BSD-3-Clause +# © 2021-2023 Contributors to the EasyScience project Callable: + """ + Simple fit function which injects the real X (independent) values into the + optimizer function. This will also flatten the results if needed. + :param real_x: Independent x parameters to be injected + :param flatten: Should the result be a flat 1D array? + :return: Wrapped optimizer function. + """ + fun = self._fit_function + + @functools.wraps(fun) + def wrapped_fit_function(x, **kwargs): + if real_x is not None: + x = real_x + dependent = fun(x, **kwargs) + if flatten: + dependent = dependent.flatten() + return dependent + + return wrapped_fit_function + + def initialize(self, fit_object: B, fit_function: Callable): + """ + Set the model and callable in the calculator interface. + + :param fit_object: The EasyScience model object + :param fit_function: The function to be optimized against. + :return: None + """ + self._fit_object = fit_object + self._fit_function = fit_function + self.__initialize() + + def __initialize(self): + """ + The real initialization. Setting the optimizer object properly + :return: None + """ + self.__engine_obj = self._current_engine(self._fit_object, self.fit_function) + self._is_initialized = True + + def create(self, engine_name: str = default_fitting_engine): + """ + Create a backend optimization engine. + :param engine_name: The label of the optimization engine to create. + :return: None + """ + engines = self.available_engines + if engine_name in engines: + self._current_engine = self._engines[engines.index(engine_name)] + self._is_initialized = False + else: + raise AttributeError(f"The supplied optimizer engine '{engine_name}' is unknown.") + + def switch_engine(self, engine_name: str): + """ + Switch backend optimization engines and initialize. + :param engine_name: The label of the optimization engine to create and instantiate. 
+ :return: None + """ + # There isn't any state to carry over + if not self._is_initialized: + raise ReferenceError('The fitting engine must be initialized before switching') + # Constrains are not carried over. Do it manually. + constraints = self.__engine_obj._constraints + self.create(engine_name) + self.__initialize() + self.__engine_obj._constraints = constraints + + @property + def available_engines(self) -> List[str]: + """ + Get a list of the names of available fitting engines + + :return: List of available fitting engines + :rtype: List[str] + """ + if Fitting.engines is None: + raise ImportError('There are no available fitting engines. Install `lmfit` and/or `bumps`') + return [engine.name for engine in Fitting.engines] + + @property + def can_fit(self) -> bool: + """ + Can a fit be performed. i.e has the object been created properly + + :return: Can a fit be performed + :rtype: bool + """ + return self._is_initialized + + @property + def current_engine(self) -> _C: + """ + Get the class object of the current fitting engine. + + :return: Class of the current fitting engine (based on the `FittingTemplate` class) + :rtype: _T + """ + return self._current_engine + + @property + def engine(self) -> _M: + """ + Get the current fitting engine object. + + :return: + :rtype: _M + """ + return self.__engine_obj + + @property + def fit_function(self) -> Callable: + """ + The raw fit function that the optimizer will call (no wrapping) + :return: Raw fit function + """ + return self._fit_function + + @fit_function.setter + def fit_function(self, fit_function: Callable): + """ + Set the raw fit function to a new one. + :param fit_function: New fit function + :return: None + """ + self._fit_function = fit_function + self.__initialize() + + @property + def fit_object(self) -> B: + """ + The EasyScience object which will be used as a model + :return: EasyScience Model + """ + return self._fit_object + + @fit_object.setter + def fit_object(self, fit_object: B): + """ + Set the EasyScience object which wil be used as a model + :param fit_object: New EasyScience object + :return: None + """ + self._fit_object = fit_object + self.__initialize() + + def __pass_through_generator(self, name: str): + """ + Attach the attributes of the calculator template to the current fitter instance. + :param name: Attribute name to attach + :return: Wrapped calculator interface object. + """ + obj = self + + def inner(*args, **kwargs): + if not obj.can_fit: + raise ReferenceError('The fitting engine must first be initialized') + func = getattr(obj.engine, name, None) + if func is None: + raise ValueError('The fitting engine does not have the attribute "{}"'.format(name)) + return func(*args, **kwargs) + + return inner + + @property + def fit(self) -> Callable: + """ + Property which wraps the current `fit` function from the fitting interface. This property return a wrapped fit + function which converts the input data into the correct shape for the optimizer, wraps the fit function to + re-constitute the independent variables and once the fit is completed, reshape the inputs to those expected. + """ + + @functools.wraps(self.engine.fit) + def inner_fit_callable( + x: np.ndarray, + y: np.ndarray, + weights: Optional[np.ndarray] = None, + vectorized: bool = False, + **kwargs, + ) -> FR: + """ + This is a wrapped callable which performs the actual fitting. It is split into + 3 sections, PRE/ FIT/ POST. 
+ - PRE = Reshaping the input data into the correct dimensions for the optimizer + - FIT = Wrapping the fit function and performing the fit + - POST = Reshaping the outputs so it is coherent with the inputs. + """ + # Check to see if we can perform a fit + if not self.can_fit: + raise ReferenceError('The fitting engine must first be initialized') + + # Precompute - Reshape all independents into the correct dimensionality + x_fit, x_new, y_new, weights, dims, kwargs = self._precompute_reshaping(x, y, weights, vectorized, kwargs) + self._dependent_dims = dims + + # Fit + fit_fun = self._fit_function + fit_fun_wrap = self._fit_function_wrapper(x_new, flatten=True) # This should be wrapped. + + # We change the fit function, so have to reset constraints + constraints = self.__engine_obj._constraints + self.fit_function = fit_fun_wrap + self.__engine_obj._constraints = constraints + f_res = self.engine.fit(x_fit, y_new, weights=weights, **kwargs) + + # Postcompute + fit_result = self._post_compute_reshaping(f_res, x, y, weights) + # Reset the function and constrains + self.fit_function = fit_fun + self.__engine_obj._constraints = constraints + return fit_result + + return inner_fit_callable + + @staticmethod + def _precompute_reshaping( + x: np.ndarray, + y: np.ndarray, + weights: Optional[np.ndarray], + vectorized: bool, + kwargs, + ): + """ + Check the dimensions of the inputs and reshape if necessary. + :param x: ND matrix of dependent points + :param y: N-1D matrix of independent points + :param kwargs: Additional key-word arguments + :return: + """ + # Make sure that they are np arrays + x_new = np.array(x) + y_new = np.array(y) + # Get the shape + x_shape = x_new.shape + # Check if the x data is 1D + if len(x_shape) > 1: + # It is ND data + # Check if the data is vectorized. i.e. should x be [NxMx...x Ndims] + if vectorized: + # Assert that the shapes are the same + if np.all(x_shape[:-1] != y_new.shape): + raise ValueError('The shape of the x and y data must be the same') + # If so do nothing but note that the data is vectorized + # x_shape = (-1,) # Should this be done? + else: + # Assert that the shapes are the same + if np.prod(x_new.shape[:-1]) != y_new.size: + raise ValueError('The number of elements in x and y data must be the same') + # Reshape the data to be [len(NxMx..), Ndims] i.e. flatten to columns + x_new = x_new.reshape(-1, x_shape[-1], order='F') + else: + # Assert that the shapes are the same + if np.all(x_shape != y_new.shape): + raise ValueError('The shape of the x and y data must be the same') + # It is 1D data + x_new = x.flatten() + # The optimizer needs a 1D array, flatten the y data + y_new = y_new.flatten() + if weights is not None: + weights = np.array(weights).flatten() + # Make a 'dummy' x array for the fit function + x_for_fit = np.array(range(y_new.size)) + return x_for_fit, x_new, y_new, weights, x_shape, kwargs + + @staticmethod + def _post_compute_reshaping(fit_result: FR, x: np.ndarray, y: np.ndarray, weights: np.ndarray) -> FR: + """ + Reshape the output of the fitter into the correct dimensions. 
+ :param fit_result: Output from the fitter + :param x: Input x independent + :param y: Input y dependent + :return: Reshaped Fit Results + """ + setattr(fit_result, 'x', x) + setattr(fit_result, 'y_obs', y) + setattr(fit_result, 'y_calc', np.reshape(fit_result.y_calc, y.shape)) + setattr(fit_result, 'y_err', np.reshape(fit_result.y_err, y.shape)) + return fit_result + + +class MultiFitter(Fitter): + """ + Extension of Fitter to enable multiple dataset/fit function fitting. We can fit these types of data simultaneously: + - Multiple models on multiple datasets. + """ + + def __init__( + self, + fit_objects: Optional[List[B]] = None, + fit_functions: Optional[List[Callable]] = None, + ): + # Create a dummy core object to hold all the fit objects. + self._fit_objects = BaseCollection('multi', *fit_objects) + self._fit_functions = fit_functions + # Initialize with the first of the fit_functions, without this it is + # not possible to change the fitting engine. + super().__init__(self._fit_objects, self._fit_functions[0]) + + def _fit_function_wrapper(self, real_x=None, flatten: bool = True) -> Callable: + """ + Simple fit function which injects the N real X (independent) values into the + optimizer function. This will also flatten the results if needed. + :param real_x: List of independent x parameters to be injected + :param flatten: Should the result be a flat 1D array? + :return: Wrapped optimizer function. + """ + # Extract of a list of callable functions + wrapped_fns = [] + for this_x, this_fun in zip(real_x, self._fit_functions): + self._fit_function = this_fun + wrapped_fns.append(Fitter._fit_function_wrapper(self, this_x, flatten=flatten)) + + def wrapped_fun(x, **kwargs): + # Generate an empty Y based on x + y = np.zeros_like(x) + i = 0 + # Iterate through wrapped functions, passing the WRONG x, the correct + # x was injected in the step above. + for idx, dim in enumerate(self._dependent_dims): + ep = i + np.prod(dim) + y[i:ep] = wrapped_fns[idx](x, **kwargs) + i = ep + return y + + return wrapped_fun + + @staticmethod + def _precompute_reshaping( + x: List[np.ndarray], + y: List[np.ndarray], + weights: Optional[List[np.ndarray]], + vectorized: bool, + kwargs, + ): + """ + Convert an array of X's and Y's to an acceptable shape for fitting. + :param x: List of independent variables. + :param y: List of dependent variables. + :param vectorized: Is the fn input vectorized or point based? + :param kwargs: Additional kwy words. 
+ :return: Variables for optimization + """ + if weights is None: + weights = [None] * len(x) + _, _x_new, _y_new, _weights, _dims, kwargs = Fitter._precompute_reshaping(x[0], y[0], weights[0], vectorized, kwargs) + x_new = [_x_new] + y_new = [_y_new] + w_new = [_weights] + dims = [_dims] + for _x, _y, _w in zip(x[1::], y[1::], weights[1::]): + _, _x_new, _y_new, _weights, _dims, _ = Fitter._precompute_reshaping(_x, _y, _w, vectorized, kwargs) + x_new.append(_x_new) + y_new.append(_y_new) + w_new.append(_weights) + dims.append(_dims) + y_new = np.hstack(y_new) + if w_new[0] is None: + w_new = None + else: + w_new = np.hstack(w_new) + x_fit = np.linspace(0, y_new.size - 1, y_new.size) + return x_fit, x_new, y_new, w_new, dims, kwargs + + def _post_compute_reshaping( + self, + fit_result_obj: FR, + x: List[np.ndarray], + y: List[np.ndarray], + weights: List[np.ndarray], + ) -> List[FR]: + """ + Take a fit results object and split it into n chuncks based on the size of the x, y inputs + :param fit_result_obj: Result from a multifit + :param x: List of X co-ords + :param y: List of Y co-ords + :return: List of fit results + """ + + cls = fit_result_obj.__class__ + sp = 0 + fit_results_list = [] + for idx, this_x in enumerate(x): + # Create a new Results obj + current_results = cls() + ep = sp + int(np.array(self._dependent_dims[idx]).prod()) + + # Fill out the new result obj (see EasyScience.Fitting.Fitting_template.FitResults) + current_results.success = fit_result_obj.success + current_results.fitting_engine = fit_result_obj.fitting_engine + current_results.p = fit_result_obj.p + current_results.p0 = fit_result_obj.p0 + current_results.x = this_x + current_results.y_obs = y[idx] + current_results.y_calc = np.reshape(fit_result_obj.y_calc[sp:ep], current_results.y_obs.shape) + current_results.y_err = np.reshape(fit_result_obj.y_err[sp:ep], current_results.y_obs.shape) + current_results.engine_result = fit_result_obj.engine_result + + # Attach an additional field for the un-modified results + current_results.total_results = fit_result_obj + fit_results_list.append(current_results) + sp = ep + return fit_results_list diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 1aecd177..896e9dad 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -83,8 +83,8 @@ def __init__( for key in kwargs.keys(): if key in self.__dict__.keys() or key in self.__slots__: raise AttributeError(f'Given kwarg: `{key}`, is an internal attribute. 
Please rename.') - self._borg.map.add_edge(self, kwargs[key]) - self._borg.map.reset_type(kwargs[key], 'created_internal') + self._global_object.map.add_edge(self, kwargs[key]) + self._global_object.map.reset_type(kwargs[key], 'created_internal') if interface is not None: kwargs[key].interface = interface # TODO wrap getter and setter in Logger @@ -113,8 +113,8 @@ def insert(self, index: int, value: Union[V, B]) -> None: values.insert(index, value) self._kwargs.reorder(**{k: v for k, v in zip(update_key, values)}) # ADD EDGE - self._borg.map.add_edge(self, value) - self._borg.map.reset_type(value, 'created_internal') + self._global_object.map.add_edge(self, value) + self._global_object.map.reset_type(value, 'created_internal') value.interface = self.interface else: raise AttributeError('Only EasyScience objects can be put into an EasyScience group') @@ -173,11 +173,11 @@ def __setitem__(self, key: int, value: Union[B, V]) -> None: update_dict = {update_key[key]: value} self._kwargs.update(update_dict) # ADD EDGE - self._borg.map.add_edge(self, value) - self._borg.map.reset_type(value, 'created_internal') + self._global_object.map.add_edge(self, value) + self._global_object.map.reset_type(value, 'created_internal') value.interface = self.interface # REMOVE EDGE - self._borg.map.prune_vertex_from_edge(self, old_item) + self._global_object.map.prune_vertex_from_edge(self, old_item) else: raise NotImplementedError('At the moment only numerical values or EasyScience objects can be set.') @@ -192,7 +192,7 @@ def __delitem__(self, key: int) -> None: """ keys = list(self._kwargs.keys()) item = self._kwargs[keys[key]] - self._borg.map.prune_vertex_from_edge(self, item) + self._global_object.map.prune_vertex_from_edge(self, item) del self._kwargs[keys[key]] def __len__(self) -> int: diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 88460b30..43bbb6a7 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -17,7 +17,7 @@ from typing import Set from typing import TypeVar -from easyscience import borg +from easyscience import global_object from easyscience.Utils.classTools import addLoggedProp from .core import ComponentSerializer @@ -33,18 +33,17 @@ class BasedBase(ComponentSerializer): - __slots__ = ['_name', '_borg', 'user_data', '_kwargs'] + __slots__ = ['_name', '_global_object', 'user_data', '_kwargs'] _REDIRECT = {} def __init__(self, name: str, interface: Optional[iF] = None, unique_name: Optional[str] = None): - self._borg = borg + self._global_object = global_object if unique_name is None: unique_name = self._unique_name_generator() self._unique_name = unique_name self._name = name - self._borg = borg - self._borg.map.add_vertex(self, obj_type='created') + self._global_object.map.add_vertex(self, obj_type="created") self.interface = interface self.user_data: dict = {} @@ -80,7 +79,7 @@ def unique_name(self, new_unique_name: str): if not isinstance(new_unique_name, str): raise TypeError("Unique name has to be a string.") self._unique_name = new_unique_name - self._borg.map.add_vertex(self) + self._global_object.map.add_vertex(self) @property def name(self) -> str: @@ -132,10 +131,12 @@ def generate_bindings(self): if self.interface is None: raise AttributeError('Interface error for generating bindings. 
`interface` has to be set.') interfaceable_children = [ - key for key in self._borg.map.get_edges(self) if issubclass(type(self._borg.map.get_item_by_key(key)), BasedBase) + key + for key in self._global_object.map.get_edges(self) + if issubclass(type(self._global_object.map.get_item_by_key(key)), BasedBase) ] for child_key in interfaceable_children: - child = self._borg.map.get_item_by_key(child_key) + child = self._global_object.map.get_item_by_key(child_key) child.interface = self.interface self.interface.generate_bindings(self) @@ -209,7 +210,7 @@ def _unique_name_generator(self) -> str: Generate a generic unique name for the object using the class name and a global iterator. """ class_name = self.__class__.__name__ - iterator_string = str(self._borg.map._get_name_iterator(class_name)) + iterator_string = str(self._global_object.map._get_name_iterator(class_name)) return class_name + "_" + iterator_string def __dir__(self) -> Iterable[str]: @@ -264,8 +265,8 @@ def __init__( if issubclass(type(kwargs[key]), (BasedBase, Descriptor, DescriptorBase)) or 'BaseCollection' in [ c.__name__ for c in type(kwargs[key]).__bases__ ]: - self._borg.map.add_edge(self, kwargs[key]) - self._borg.map.reset_type(kwargs[key], 'created_internal') + self._global_object.map.add_edge(self, kwargs[key]) + self._global_object.map.reset_type(kwargs[key], 'created_internal') addLoggedProp( self, key, @@ -298,8 +299,8 @@ def __init__(self, foo: Parameter, bar: Parameter): :return: None """ self._kwargs[key] = component - self._borg.map.add_edge(self, component) - self._borg.map.reset_type(component, 'created_internal') + self._global_object.map.add_edge(self, component) + self._global_object.map.reset_type(component, 'created_internal') addLoggedProp( self, key, @@ -325,13 +326,13 @@ def __setattr__(self, key: str, value: BV) -> None: ): if issubclass(type(getattr(self, key, None)), (BasedBase, Descriptor, DescriptorBase)): old_obj = self.__getattribute__(key) - self._borg.map.prune_vertex_from_edge(self, old_obj) + self._global_object.map.prune_vertex_from_edge(self, old_obj) self._add_component(key, value) else: if hasattr(self, key) and issubclass(type(value), (BasedBase, Descriptor, DescriptorBase)): old_obj = self.__getattribute__(key) - self._borg.map.prune_vertex_from_edge(self, old_obj) - self._borg.map.add_edge(self, value) + self._global_object.map.prune_vertex_from_edge(self, old_obj) + self._global_object.map.add_edge(self, value) super(BaseObj, self).__setattr__(key, value) # Update the interface bindings if something changed (BasedBase and Descriptor) if old_obj is not None: diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 74b37813..7676d27d 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -27,7 +27,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object from easyscience import pint from easyscience import ureg from easyscience.fitting.Constraints import SelfConstraint @@ -55,7 +55,7 @@ class Descriptor(ComponentSerializer): """ _constructor = Q_ - _borg = borg + _global_object = global_object _REDIRECT = { 'value': lambda obj: obj.raw_value, 'units': lambda obj: obj._args['units'], @@ -113,10 +113,10 @@ def __init__( self._unique_name = unique_name self.name = name # Let the collective know we've been assimilated - self._borg.map.add_vertex(self, obj_type='created') + self._global_object.map.add_vertex(self, obj_type='created') # Make the connection between self and parent if 
parent is not None: - self._borg.map.add_edge(parent, self) + self._global_object.map.add_edge(parent, self) # Attach units if necessary if isinstance(units, ureg.Unit): @@ -199,7 +199,7 @@ def unique_name(self, new_unique_name: str): if not isinstance(new_unique_name, str): raise TypeError("Unique name has to be a string.") self._unique_name = new_unique_name - self._borg.map.add_vertex(self) + self._global_object.map.add_vertex(self) @property def display_name(self) -> str: @@ -305,7 +305,7 @@ def value(self, value: Any): :return: None """ if not self.enabled: - if borg.debug: + if global_object.debug: raise CoreSetException(f'{str(self)} is not enabled.') return self.__deepValueSetter(value) @@ -368,7 +368,7 @@ def _unique_name_generator(self) -> str: Generate a generic unique name for the object using the class name and a global iterator. """ class_name = self.__class__.__name__ - iterator_string = str(self._borg.map._get_name_iterator(class_name)) + iterator_string = str(self._global_object.map._get_name_iterator(class_name)) return class_name + "_" + iterator_string @@ -464,15 +464,15 @@ def _property_value(self, set_value: Union[numbers.Number, np.ndarray, Q_]): set_value = set_value.magnitude # Save the old state and create the new state old_value = self._value - state = self._borg.stack.enabled + state = self._global_object.stack.enabled if state: - self._borg.stack.force_state(False) + self._global_object.stack.force_state(False) try: new_value = old_value if set_value in self.available_options: new_value = set_value finally: - self._borg.stack.force_state(state) + self._global_object.stack.force_state(state) # Restore to the old state self.__previous_set(self, new_value) @@ -619,13 +619,13 @@ def _property_value(self, set_value: Union[numbers.Number, np.ndarray, M_]) -> N new_value = self.__constraint_runner(constraint_type, set_value) # Then run any user constraints. constraint_type: dict = self.user_constraints - state = self._borg.stack.enabled + state = self._global_object.stack.enabled if state: - self._borg.stack.force_state(False) + self._global_object.stack.force_state(False) try: new_value = self.__constraint_runner(constraint_type, new_value) finally: - self._borg.stack.force_state(state) + self._global_object.stack.force_state(state) # And finally update any virtual constraints constraint_type: dict = self._constraints['virtual'] @@ -720,9 +720,9 @@ def fixed(self, value: bool): :return: None """ if not self.enabled: - if self._borg.stack.enabled: - self._borg.stack.pop() - if borg.debug: + if self._global_object.stack.enabled: + self._global_object.stack.pop() + if global_object.debug: raise CoreSetException(f'{str(self)} is not enabled.') return # TODO Should we try and cast value to bool rather than throw ValueError? @@ -812,13 +812,13 @@ def _quick_set( # Then run any user constraints. 
if run_user_constraints: constraint_type: dict = self.user_constraints - state = self._borg.stack.enabled + state = self._global_object.stack.enabled if state: - self._borg.stack.force_state(False) + self._global_object.stack.force_state(False) try: set_value = self.__constraint_runner(constraint_type, set_value) finally: - self._borg.stack.force_state(state) + self._global_object.stack.force_state(state) if run_virtual_constraints: # And finally update any virtual constraints constraint_type: dict = self._constraints['virtual'] @@ -841,7 +841,7 @@ def __constraint_runner( continue this_new_value = constraint(no_set=True) if this_new_value != newer_value: - if borg.debug: + if global_object.debug: print(f'Constraint `{constraint}` has been applied') self._value = self.__class__._constructor( value=this_new_value, @@ -870,8 +870,8 @@ def bounds(self, new_bound: Union[Tuple[numbers.Number, numbers.Number], numbers """ # Macro checking and opening for undo/redo close_macro = False - if self._borg.stack.enabled: - self._borg.stack.beginMacro('Setting bounds') + if self._global_object.stack.enabled: + self._global_object.stack.beginMacro('Setting bounds') close_macro = True # Have we only been given a single number (MIN)? if isinstance(new_bound, numbers.Number): @@ -891,4 +891,4 @@ def bounds(self, new_bound: Union[Tuple[numbers.Number, numbers.Number], numbers self.fixed = False # Close the macro if we opened it if close_macro: - self._borg.stack.endMacro() + self._global_object.stack.endMacro() diff --git a/src/easyscience/Objects/Borg.py b/src/easyscience/Objects/global_object.py similarity index 91% rename from src/easyscience/Objects/Borg.py rename to src/easyscience/Objects/global_object.py index c926c3b9..f936447e 100644 --- a/src/easyscience/Objects/Borg.py +++ b/src/easyscience/Objects/global_object.py @@ -12,9 +12,9 @@ @singleton -class Borg: +class GlobalObject: """ - Borg is the assimilated knowledge of `EasyScience`. Every class based on `EasyScience` gets brought + GlobalObject is the assimilated knowledge of `EasyScience`. Every class based on `EasyScience` gets brought into the collective. 
""" diff --git a/src/easyscience/Objects/virtual.py b/src/easyscience/Objects/virtual.py index b77bf2e5..186db323 100644 --- a/src/easyscience/Objects/virtual.py +++ b/src/easyscience/Objects/virtual.py @@ -14,8 +14,8 @@ from typing import Iterable from typing import MutableSequence -from easyscience import borg -from easyscience.fitting.Constraints import ObjConstraint +from easyscience import global_object +from easyscience.Fitting.Constraints import ObjConstraint if TYPE_CHECKING: from easyscience.Utils.typing import BV @@ -28,7 +28,7 @@ def raise_(ex): def _remover(a_obj_id: str, v_obj_id: str): try: # Try to get parent object (might be deleted) - a_obj = borg.map.get_item_by_key(a_obj_id) + a_obj = global_object.map.get_item_by_key(a_obj_id) except ValueError: return if a_obj._constraints['virtual'].get(v_obj_id, False): @@ -97,13 +97,15 @@ def component_realizer(obj: BV, component: str, recursive: bool = True): key = value.unique_name if getattr(value, '__old_class__', value.__class__) in ec_var.__dict__.values(): continue - component._borg.map.prune_vertex_from_edge(component, component._kwargs[key]) - component._borg.map.add_edge(component, old_component._kwargs[key]) + component._global_object.map.prune_vertex_from_edge( + component, component._kwargs[key] + ) + component._global_object.map.add_edge(component, old_component._kwargs[key]) component._kwargs[key] = old_component._kwargs[key] done_mapping = False if done_mapping: - obj._borg.map.prune_vertex_from_edge(obj, old_component) - obj._borg.map.add_edge(obj, new_components) + obj._global_object.map.prune_vertex_from_edge(obj, old_component) + obj._global_object.map.add_edge(obj, new_components) obj._kwargs[component] = new_components @@ -123,7 +125,7 @@ def virtualizer(obj: BV) -> BV: # First check if we're already a virtual object if getattr(obj, '_is_virtual', False): new_obj = deepcopy(obj) - old_obj = obj._borg.map.get_item_by_key(obj._derived_from) + old_obj = obj._global_object.map.get_item_by_key(obj._derived_from) constraint = ObjConstraint(new_obj, '', old_obj) constraint.external = True old_obj._constraints['virtual'][str(obj.unique_name)] = constraint diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/Utils/Hugger/Property.py index 03864e51..daad8458 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/Utils/Hugger/Property.py @@ -11,7 +11,7 @@ from typing import Callable from typing import List -from easyscience import borg +from easyscience import global_object from easyscience.Utils.Hugger.Hugger import PatcherFactory from easyscience.Utils.Hugger.Hugger import Store @@ -24,7 +24,7 @@ class LoggedProperty(property): `BaseObj`. 
""" - _borg = borg + _global_object = global_object def __init__(self, *args, get_id=None, my_self=None, test_class=None, **kwargs): super(LoggedProperty, self).__init__(*args, **kwargs) @@ -52,7 +52,7 @@ def stack_(frame): return test def __get__(self, instance, owner=None): - if not borg.script.enabled: + if not global_object.script.enabled: return super(LoggedProperty, self).__get__(instance, owner) test = self._caller_class(self.test_class) res = super(LoggedProperty, self).__get__(instance, owner) @@ -60,10 +60,10 @@ def __get__(self, instance, owner=None): def result_item(item_to_be_resulted): if item_to_be_resulted is None: return None - if borg.map.is_known(item_to_be_resulted): - borg.map.change_type(item_to_be_resulted, "returned") + if global_object.map.is_known(item_to_be_resulted): + global_object.map.change_type(item_to_be_resulted, "returned") else: - borg.map.add_vertex(item_to_be_resulted, obj_type="returned") + global_object.map.add_vertex(item_to_be_resulted, obj_type="returned") if not test and self._get_id is not None and self._my_self is not None: if not isinstance(res, list): @@ -72,19 +72,19 @@ def result_item(item_to_be_resulted): for item in res: result_item(item) Store().append_log(self.makeEntry("get", res)) - if borg.debug: # noqa: S1006 + if global_object.debug: # noqa: S1006 print( f"I'm {self._my_self} and {self._get_id} has been called from the outside!" ) return res def __set__(self, instance, value): - if not borg.script.enabled: + if not global_object.script.enabled: return super().__set__(instance, value) test = self._caller_class(self.test_class) if not test and self._get_id is not None and self._my_self is not None: Store().append_log(self.makeEntry("set", value)) - if borg.debug: # noqa: S1006 + if global_object.debug: # noqa: S1006 print( f"I'm {self._my_self} and {self._get_id} has been set to {value} from the outside!" ) @@ -98,51 +98,51 @@ def makeEntry(self, log_type, returns, *args, **kwargs) -> str: returns = [returns] if log_type == "get": for var in returns: - if var.unique_name in borg.map.returned_objs: - index = borg.map.returned_objs.index( + if var.unique_name in global_object.map.returned_objs: + index = global_object.map.returned_objs.index( var.unique_name ) temp += f"{Store().var_ident}{index}, " if len(returns) > 0: temp = temp[:-2] temp += " = " - if self._my_self.unique_name in borg.map.created_objs: + if self._my_self.unique_name in global_object.map.created_objs: # for edge in route[::-1]: - index = borg.map.created_objs.index( + index = global_object.map.created_objs.index( self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) - if self._my_self.unique_name in borg.map.created_internal: + if self._my_self.unique_name in global_object.map.created_internal: # We now have to trace.... 
- route = borg.map.reverse_route(self._my_self) # noqa: F841 - index = borg.map.created_internal.index( + route = global_object.map.reverse_route(self._my_self) # noqa: F841 + index = global_object.map.created_internal.index( self._my_self.unique_name ) temp += ( f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id}" ) elif log_type == "set": - if self._my_self.unique_name in borg.map.created_objs: - index = borg.map.created_objs.index( + if self._my_self.unique_name in global_object.map.created_objs: + index = global_object.map.created_objs.index( self._my_self.unique_name ) temp += f"{self._my_self.__class__.__name__.lower()}_{index}.{self._get_id} = " args = args[1:] for var in args: - if var.unique_name in borg.map.argument_objs: - index = borg.map.argument_objs.index( + if var.unique_name in global_object.map.argument_objs: + index = global_object.map.argument_objs.index( var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.unique_name in borg.map.returned_objs: - index = borg.map.returned_objs.index( + elif var.unique_name in global_object.map.returned_objs: + index = global_object.map.returned_objs.index( var.unique_name ) temp += f"{Store().var_ident}{index}" - elif var.unique_name in borg.map.created_objs: - index = borg.map.created_objs.index(var.unique_name) + elif var.unique_name in global_object.map.created_objs: + index = global_object.map.created_objs.index(var.unique_name) temp += f"{self._my_self.__class__.__name__.lower()}_{index}" else: if isinstance(var, str): @@ -158,7 +158,7 @@ class PropertyHugger(PatcherFactory): # Properties are immutable, so need to be set at the parent level. However unlike `FunctionHugger` we can't traverse # the stack to get the parent. So, it and it's name has to be set at initialization. Boo! 
- _borg = borg + _global_object = global_object def __init__(self, klass, prop_name): super().__init__() @@ -180,7 +180,7 @@ def patch(self): for key, item in self.__patch_ref.items(): func = getattr(self.property, key) if func is not None: - if borg.debug: + if global_object.debug: print(f"Patching property {self.klass.__name__}.{self.prop_name}") patch_function: Callable = item.get("patcher") new_func = patch_function(func) @@ -188,14 +188,14 @@ def patch(self): setattr(self.klass, self.prop_name, property(**option)) def restore(self): - if borg.debug: + if global_object.debug: print(f"Restoring property {self.klass.__name__}.{self.prop_name}") setattr(self.klass, self.prop_name, self.property) def patch_get(self, func: Callable) -> Callable: @wraps(func) def inner(*args, **kwargs): - if borg.debug: + if global_object.debug: print( f"{self.klass.__name__}.{self.prop_name} has been called with {args[1:]}, {kwargs}" ) @@ -210,7 +210,7 @@ def inner(*args, **kwargs): def patch_set(self, func: Callable) -> Callable: @wraps(func) def inner(*args, **kwargs): - if borg.debug: + if global_object.debug: print( f"{self.klass.__name__}.{self.prop_name} has been set with {args[1:]}, {kwargs}" ) @@ -223,7 +223,7 @@ def inner(*args, **kwargs): def patch_del(self, func: Callable) -> Callable: @wraps(func) def inner(*args, **kwargs): - if borg.debug: + if global_object.debug: print(f"{self.klass.__name__}.{self.prop_name} has been deleted.") self._append_log(self.makeEntry("del", None, *args, **kwargs)) return func(*args, **kwargs) diff --git a/src/easyscience/Utils/UndoRedo.py b/src/easyscience/Utils/UndoRedo.py index 4fe83b9e..02b30020 100644 --- a/src/easyscience/Utils/UndoRedo.py +++ b/src/easyscience/Utils/UndoRedo.py @@ -18,7 +18,7 @@ import numpy as np -from easyscience import borg +from easyscience import global_object class UndoCommand(metaclass=abc.ABCMeta): @@ -59,9 +59,9 @@ def inner(obj, *args, **kwargs): # Only do the work to a NotarizedDict. 
if hasattr(obj, '_stack_enabled') and obj._stack_enabled: if not kwargs: - borg.stack.push(DictStack(obj, *args)) + global_object.stack.push(DictStack(obj, *args)) else: - borg.stack.push(DictStackReCreate(obj, **kwargs)) + global_object.stack.push(DictStackReCreate(obj, **kwargs)) else: func(obj, *args, **kwargs) @@ -75,7 +75,7 @@ class NotarizedDict(UserDict): def __init__(self, **kwargs): super().__init__(**kwargs) - self._borg = borg + self._global_object = global_object self._stack_enabled = False @classmethod @@ -467,10 +467,10 @@ def wrapper(obj, *args) -> NoReturn: if ret: return - if borg.debug: + if global_object.debug: print(f"I'm {obj} and have been set from {old_value} to {new_value}!") - borg.stack.push(PropertyStack(obj, func, old_value, new_value, **kwargs)) + global_object.stack.push(PropertyStack(obj, func, old_value, new_value, **kwargs)) return functools.update_wrapper(wrapper, func) diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 03412a8f..255d1d13 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -11,7 +11,7 @@ from typing import List from typing import Tuple -from easyscience import borg +from easyscience import global_object from easyscience.Utils.Hugger.Property import LoggedProperty if TYPE_CHECKING: @@ -64,8 +64,8 @@ def generatePath(model_obj: B, skip_first: bool = False) -> Tuple[List[int], Lis model_id = model_obj.unique_name for par in pars: elem = par.unique_name - route = borg.map.reverse_route(elem, model_id) - objs = [getattr(borg.map.get_item_by_key(r), "name") for r in route] + route = global_object.map.reverse_route(elem, model_id) + objs = [getattr(global_object.map.get_item_by_key(r), "name") for r in route] objs.reverse() names.append(".".join(objs[start_idx:])) ids.append(elem.int) diff --git a/src/easyscience/Utils/decorators.py b/src/easyscience/Utils/decorators.py index 72d94f05..89db9d4f 100644 --- a/src/easyscience/Utils/decorators.py +++ b/src/easyscience/Utils/decorators.py @@ -10,7 +10,7 @@ import warnings from time import time -from easyscience import borg +from easyscience import global_object class memoized: @@ -67,7 +67,7 @@ def time_it(func): :return: callable function with timer """ name = func.__module__ + "." + func.__name__ - time_logger = borg.log.getLogger("timer." + name) + time_logger = global_object.log.getLogger("timer." 
+ name) @functools.wraps(func) def _time_it(*args, **kwargs): diff --git a/src/easyscience/Utils/io/template.py b/src/easyscience/Utils/io/template.py index 9f37e905..04975a00 100644 --- a/src/easyscience/Utils/io/template.py +++ b/src/easyscience/Utils/io/template.py @@ -222,7 +222,7 @@ def runner(o): d.update({'value': runner(obj.value)}) # pylint: disable=E1101 if hasattr(obj, '_convert_to_dict'): d = obj._convert_to_dict(d, self, skip=skip, **kwargs) - if hasattr(obj, '_borg') and '@id' not in d: + if hasattr(obj, '_global_object') and '@id' not in d: d['@id'] = obj.unique_name return d diff --git a/src/easyscience/__init__.py b/src/easyscience/__init__.py index 606c7204..547ed141 100644 --- a/src/easyscience/__init__.py +++ b/src/easyscience/__init__.py @@ -8,9 +8,9 @@ import pint from easyscience.__version__ import __version__ as __version__ -from easyscience.Objects.Borg import Borg +from easyscience.Objects.global_object import GlobalObject ureg = pint.UnitRegistry() -borg = Borg() -borg.instantiate_stack() -borg.stack.enabled = False +global_object = GlobalObject() +global_object.instantiate_stack() +global_object.stack.enabled = False diff --git a/src/easyscience/fitting/Constraints.py b/src/easyscience/fitting/Constraints.py index 3995f9eb..7f6de1e8 100644 --- a/src/easyscience/fitting/Constraints.py +++ b/src/easyscience/fitting/Constraints.py @@ -21,7 +21,7 @@ import numpy as np from asteval import Interpreter -from easyscience import borg +from easyscience import global_object from easyscience.Objects.core import ComponentSerializer if TYPE_CHECKING: @@ -33,7 +33,7 @@ class ConstraintBase(ComponentSerializer, metaclass=ABCMeta): A base class used to describe a constraint to be applied to EasyScience base objects. """ - _borg = borg + _global_object = global_object def __init__( self, @@ -62,7 +62,7 @@ def __init__( if dependent_obj.__class__.__name__ == 'Parameter': if not dependent_obj.enabled: raise AssertionError('A dependent object needs to be initially enabled.') - if borg.debug: + if global_object.debug: print(f'Dependent variable {dependent_obj}. It should be a `Descriptor`.' f'Setting to fixed') dependent_obj.enabled = False self._finalizer = weakref.finalize(self, cleanup_constraint, self.dependent_obj_ids, True) @@ -154,7 +154,7 @@ def get_obj(self, key: int) -> V: :param key: an EasyScience objects unique key :return: EasyScience object """ - return self._borg.map.get_item_by_key(key) + return self._global_object.map.get_item_by_key(key) C = TypeVar('C', bound=ConstraintBase) @@ -516,8 +516,8 @@ def __repr__(self) -> str: def cleanup_constraint(obj_id: str, enabled: bool): try: - obj = borg.map.get_item_by_key(obj_id) + obj = global_object.map.get_item_by_key(obj_id) obj.enabled = enabled except ValueError: - if borg.debug: + if global_object.debug: print(f'Object with ID {obj_id} has already been deleted') diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 7e7502e8..2091d84f 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -213,11 +213,11 @@ def fit( self._p_0 = {f'p{key}': self._cached_pars[key].raw_value for key in self._cached_pars.keys()} problem = FitProblem(model) - # Why do we do this? Because a fitting template has to have borg instantiated outside pre-runtime - from easyscience import borg + # Why do we do this? 
Because a fitting template has to have global_object instantiated outside pre-runtime + from easyscience import global_object - stack_status = borg.stack.enabled - borg.stack.enabled = False + stack_status = global_object.stack.enabled + global_object.stack.enabled = False try: model_results = bumps_fit(problem, **default_method, **minimizer_kwargs, **kwargs) @@ -276,7 +276,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status: bool): :return: None :rtype: noneType """ - from easyscience import borg + from easyscience import global_object pars = self._cached_pars @@ -284,15 +284,15 @@ def _set_parameter_fit_result(self, fit_result, stack_status: bool): for name in pars.keys(): pars[name].value = self._cached_pars_vals[name][0] pars[name].error = self._cached_pars_vals[name][1] - borg.stack.enabled = True - borg.stack.beginMacro('Fitting routine') + global_object.stack.enabled = True + global_object.stack.beginMacro('Fitting routine') for index, name in enumerate(self._cached_model._pnames): dict_name = name[1:] pars[dict_name].value = fit_result.x[index] pars[dict_name].error = fit_result.dx[index] if stack_status: - borg.stack.endMacro() + global_object.stack.endMacro() def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: """ diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index 47c4d8a8..a30a1668 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -193,11 +193,11 @@ def fit( else: self._p_0 = {f'p{key}': self._cached_pars[key].raw_value for key in self._cached_pars.keys()} - # Why do we do this? Because a fitting template has to have borg instantiated outside pre-runtime - from easyscience import borg + # Why do we do this? Because a fitting template has to have global_object instantiated outside pre-runtime + from easyscience import global_object - stack_status = borg.stack.enabled - borg.stack.enabled = False + stack_status = global_object.stack.enabled + global_object.stack.enabled = False try: model_results = self.dfols_fit(model, **kwargs) @@ -233,15 +233,15 @@ def _set_parameter_fit_result(self, fit_result, stack_status, ci: float = 0.95) :return: None :rtype: noneType """ - from easyscience import borg + from easyscience import global_object pars = self._cached_pars if stack_status: for name in pars.keys(): pars[name].value = self._cached_pars_vals[name][0] pars[name].error = self._cached_pars_vals[name][1] - borg.stack.enabled = True - borg.stack.beginMacro('Fitting routine') + global_object.stack.enabled = True + global_object.stack.beginMacro('Fitting routine') error_matrix = self._error_from_jacobian(fit_result.jacobian, fit_result.resid, ci) for idx, par in enumerate(pars.values()): @@ -249,7 +249,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status, ci: float = 0.95) par.error = error_matrix[idx, idx] if stack_status: - borg.stack.endMacro() + global_object.stack.endMacro() def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: """ diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 6867c0fe..c5b750dc 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -198,11 +198,11 @@ def fit( minimizer_kwargs = {'fit_kws': minimizer_kwargs} minimizer_kwargs.update(engine_kwargs) - # Why do we do this? 
Because a fitting template has to have borg instantiated outside pre-runtime - from easyscience import borg + # Why do we do this? Because a fitting template has to have global_object instantiated outside pre-runtime + from easyscience import global_object - stack_status = borg.stack.enabled - borg.stack.enabled = False + stack_status = global_object.stack.enabled + global_object.stack.enabled = False try: if model is None: @@ -258,15 +258,15 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) :return: None :rtype: noneType """ - from easyscience import borg + from easyscience import global_object pars = self._cached_pars if stack_status: for name in pars.keys(): pars[name].value = self._cached_pars_vals[name][0] pars[name].error = self._cached_pars_vals[name][1] - borg.stack.enabled = True - borg.stack.beginMacro('Fitting routine') + global_object.stack.enabled = True + global_object.stack.beginMacro('Fitting routine') for name in pars.keys(): pars[name].value = fit_result.params['p' + str(name)].value if fit_result.errorbars: @@ -274,7 +274,7 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) else: pars[name].error = 0.0 if stack_status: - borg.stack.endMacro() + global_object.stack.endMacro() def _gen_fit_results(self, fit_results: ModelResult, **kwargs) -> FitResults: """ diff --git a/tests/integration_tests/test_undoRedo.py b/tests/integration_tests/test_undoRedo.py index 6ed4ccd1..a31f061e 100644 --- a/tests/integration_tests/test_undoRedo.py +++ b/tests/integration_tests/test_undoRedo.py @@ -32,9 +32,9 @@ def createParam(option): def doUndoRedo(obj, attr, future, additional=""): - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True e = False def getter(_obj, _attr): @@ -47,16 +47,16 @@ def getter(_obj, _attr): previous = getter(obj, attr) setattr(obj, attr, future) assert getter(obj, attr) == future - assert borg.stack.canUndo() - borg.stack.undo() + assert global_object.stack.canUndo() + global_object.stack.undo() assert getter(obj, attr) == previous - assert borg.stack.canRedo() - borg.stack.redo() + assert global_object.stack.canRedo() + global_object.stack.redo() assert getter(obj, attr) == future except Exception as err: e = err finally: - borg.stack.enabled = False + global_object.stack.enabled = False return e @@ -93,9 +93,9 @@ def test_SinglesUndoRedo(idx, test): @pytest.mark.parametrize("value", (True, False)) def test_Parameter_Bounds_UndoRedo(value): - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True p = Parameter("test", 1, enabled=value) assert p.min == -np.inf assert p.max == np.inf @@ -107,7 +107,7 @@ def test_Parameter_Bounds_UndoRedo(value): assert p.bounds == (0, 2) assert p.enabled is True - borg.stack.undo() + global_object.stack.undo() assert p.min == -np.inf assert p.max == np.inf assert p.bounds == (-np.inf, np.inf) @@ -138,9 +138,9 @@ def test_BaseCollectionUndoRedo(): # assert not doUndoRedo(obj, 'name', name2) - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True original_length = len(obj) p = Parameter("slip_in", 50) @@ -152,12 +152,12 @@ def test_BaseCollectionUndoRedo(): assert item == obj_r # Test inserting items - borg.stack.undo() + global_object.stack.undo() assert len(obj) == original_length _ = objs.pop(idx) for item, obj_r in zip(obj, 
objs): assert item == obj_r - borg.stack.redo() + global_object.stack.redo() assert len(obj) == original_length + 1 objs.insert(idx, p) for item, obj_r in zip(obj, objs): @@ -169,13 +169,13 @@ def test_BaseCollectionUndoRedo(): assert len(obj) == original_length for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.undo() + global_object.stack.undo() assert len(obj) == original_length + 1 objs.insert(idx, p) for item, obj_r in zip(obj, objs): assert item == obj_r del objs[idx] - borg.stack.redo() + global_object.stack.redo() assert len(obj) == original_length for item, obj_r in zip(obj, objs): assert item == obj_r @@ -187,45 +187,45 @@ def test_BaseCollectionUndoRedo(): assert len(obj) == original_length for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.undo() + global_object.stack.undo() for i in range(len(obj)): if i == idx: item = old_item else: item = objs[i] assert obj[i] == item - borg.stack.redo() + global_object.stack.redo() for item, obj_r in zip(obj, objs): assert item == obj_r - borg.stack.enabled = False + global_object.stack.enabled = False def test_UndoRedoMacros(): items = [createSingleObjs(idx) for idx in range(5)] offset = 5 undo_text = "test_macro" - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True - borg.stack.beginMacro(undo_text) + global_object.stack.enabled = True + global_object.stack.beginMacro(undo_text) values = [item.raw_value for item in items] for item, value in zip(items, values): item.value = value + offset - borg.stack.endMacro() + global_object.stack.endMacro() for item, old_value in zip(items, values): assert item.raw_value == old_value + offset - assert borg.stack.undoText() == undo_text + assert global_object.stack.undoText() == undo_text - borg.stack.undo() + global_object.stack.undo() for item, old_value in zip(items, values): assert item.raw_value == old_value - assert borg.stack.redoText() == undo_text + assert global_object.stack.redoText() == undo_text - borg.stack.redo() + global_object.stack.redo() for item, old_value in zip(items, values): assert item.raw_value == old_value + offset @@ -272,21 +272,21 @@ def __call__(self, x: np.ndarray) -> np.ndarray: except AttributeError: pytest.skip(msg=f"{fit_engine} is not installed") - from easyscience import borg + from easyscience import global_object - borg.stack.enabled = True + global_object.stack.enabled = True res = f.fit(x, y) # assert l1.c.raw_value == pytest.approx(l2.c.raw_value, rel=l2.c.error * 3) # assert l1.m.raw_value == pytest.approx(l2.m.raw_value, rel=l2.m.error * 3) - assert borg.stack.undoText() == "Fitting routine" + assert global_object.stack.undoText() == "Fitting routine" - borg.stack.undo() + global_object.stack.undo() assert l2.m.raw_value == m_sp assert l2.c.raw_value == c_sp - assert borg.stack.redoText() == "Fitting routine" + assert global_object.stack.redoText() == "Fitting routine" - borg.stack.redo() + global_object.stack.redo() assert l2.m.raw_value == res.p[f"p{l2.m.unique_name}"] assert l2.c.raw_value == res.p[f"p{l2.c.unique_name}"] @@ -305,8 +305,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # result_value = f_fun(a, b) # result_error = (sa ** 2 + sb ** 2) ** 0.5 # -# from easyscience import borg -# borg.stack.enabled = True +# from easyscience import global_object +# global_object.stack.enabled = True # # # Perform basic test # p1 = Parameter('a', a) @@ -314,9 +314,9 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # # p1 = p_fun(p1, p2) # assert float(p1) == result_value 
-# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # # # Perform basic + error @@ -325,10 +325,10 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # p1 = p_fun(p1, p2) # assert float(p1) == result_value # assert p1.error == result_error -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # @@ -339,11 +339,11 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == 'meter / second' -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa # assert str(p1.unit) == 'meter / second' -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == 'meter / second' @@ -367,8 +367,8 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # result_value = f_fun(a, b) # result_error = ((sa / a) ** 2 + (sb / b) ** 2) ** 0.5 * result_value # -# from easyscience import borg -# borg.stack.enabled = True +# from easyscience import global_object +# global_object.stack.enabled = True # # # Perform basic test # p1 = Parameter('a', a) @@ -376,9 +376,9 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # # p1 = p_fun(p1, p2) # assert float(p1) == result_value -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # # # Perform basic + error @@ -387,10 +387,10 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # p1 = p_fun(p1, p2) # assert float(p1) == result_value # assert p1.error == result_error -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # @@ -401,11 +401,11 @@ def __call__(self, x: np.ndarray) -> np.ndarray: # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == u_str -# borg.stack.undo() +# global_object.stack.undo() # assert float(p1) == a # assert p1.error == sa # assert str(p1.unit) == unit -# borg.stack.redo() +# global_object.stack.redo() # assert float(p1) == result_value # assert p1.error == result_error # assert str(p1.unit) == u_str diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index 197dbcae..cf71ccb8 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -20,7 +20,7 @@ from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter from easyscience.Utils.io.dict import DictSerializer -from easyscience import borg +from easyscience import global_object @pytest.fixture @@ -195,7 +195,7 @@ def check_dict(check, item): if isinstance(check, dict) and isinstance(item, dict): if "@module" in item.keys(): with not_raises([ValueError, AttributeError]): - borg.map._clear() + global_object.map._clear() this_obj = DictSerializer().decode(item) for key in check.keys(): @@ -363,7 +363,7 @@ def from_pars(cls, a: float): a_start = 5 a_end = 10 a = A.from_pars(a_start) - graph = a._borg.map + graph = a._global_object.map assert a.a.raw_value == a_start assert 
len(graph.get_edges(a)) == 1 @@ -404,7 +404,7 @@ def from_pars(cls, a: float): a_start = 5 a_end = 10 a = A.from_pars(a_start) - graph = a._borg.map + graph = a._global_object.map assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 @@ -429,7 +429,7 @@ def from_pars(cls, a: float): a_start = 5 a_end = 10 a = A.from_pars(a_start) - graph = a._borg.map + graph = a._global_object.map assert a.a.raw_value == a_start assert len(graph.get_edges(a)) == 1 diff --git a/tests/unit_tests/Objects/test_Descriptor_Parameter.py b/tests/unit_tests/Objects/test_Descriptor_Parameter.py index b8745b0e..477e42b9 100644 --- a/tests/unit_tests/Objects/test_Descriptor_Parameter.py +++ b/tests/unit_tests/Objects/test_Descriptor_Parameter.py @@ -16,7 +16,7 @@ from easyscience.Objects.Variable import CoreSetException from easyscience.Objects.Variable import Descriptor from easyscience.Objects.Variable import Parameter -from easyscience.Objects.Variable import borg +from easyscience.Objects.Variable import global_object from easyscience.Objects.Variable import ureg @@ -132,7 +132,7 @@ def test_Parameter_value_get(element, expected): @pytest.mark.parametrize("enabled", (None, True, False)) @pytest.mark.parametrize("instance", (Descriptor, Parameter), indirect=True) def test_item_value_set(instance, enabled, debug): - borg.debug = debug + global_object.debug = debug set_value = 2 d = instance("test", 1) if enabled is not None: diff --git a/tests/unit_tests/Objects/test_Groups.py b/tests/unit_tests/Objects/test_Groups.py index c4e14a7e..8314b956 100644 --- a/tests/unit_tests/Objects/test_Groups.py +++ b/tests/unit_tests/Objects/test_Groups.py @@ -14,7 +14,7 @@ from easyscience.Objects.ObjectClasses import BaseObj from easyscience.Objects.ObjectClasses import Descriptor from easyscience.Objects.ObjectClasses import Parameter -from easyscience import borg +from easyscience import global_object test_dict = { "@module": "easyscience.Objects.Groups", @@ -449,7 +449,7 @@ def test_baseCollection_iterator_dict(cls): obj = cls(name, *l_object) d = obj.as_dict() - borg.map._clear() + global_object.map._clear() obj2 = cls.from_dict(d) for index, item in enumerate(obj2): @@ -491,7 +491,7 @@ def test_baseCollection_set_index(cls): assert obj[idx] == p2 obj[idx] = p4 assert obj[idx] == p4 - edges = obj._borg.map.get_edges(obj) + edges = obj._global_object.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: assert item.unique_name in edges @@ -515,7 +515,7 @@ def test_baseCollection_set_index_based(cls): assert obj[idx] == p4 obj[idx] = d assert obj[idx] == d - edges = obj._borg.map.get_edges(obj) + edges = obj._global_object.map.get_edges(obj) assert len(edges) == len(obj) for item in obj: assert item.unique_name in edges @@ -551,9 +551,9 @@ class Beta(BaseObj): @pytest.mark.parametrize("cls", class_constructors) def test_basecollectionGraph(cls): - from easyscience import borg + from easyscience import global_object - G = borg.map + G = global_object.map name = "test" v = [1, 2] p = [Parameter(f"p{i}", v[i]) for i in range(len(v))] diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 92f8e9cd..8149d765 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -6,30 +6,30 @@ from easyscience.Objects.Variable import Parameter from easyscience.Objects.ObjectClasses import BaseObj import pytest -from easyscience import borg +from easyscience import global_object class TestGraph: @pytest.fixture def clear(self): - 
borg.map._clear() + global_object.map._clear() def test_clear(self, clear): test_obj = BaseObj("test") - assert len(borg.map._store) == 1 - assert len(borg.map._Graph__graph_dict) == 1 - assert borg.map._name_iterator_dict == {"BaseObj": 0} - borg.map._clear() - assert len(borg.map._store) == 0 - assert borg.map._Graph__graph_dict == {} - assert borg.map._name_iterator_dict == {} + assert len(global_object.map._store) == 1 + assert len(global_object.map._Graph__graph_dict) == 1 + assert global_object.map._name_iterator_dict == {"BaseObj": 0} + global_object.map._clear() + assert len(global_object.map._store) == 0 + assert global_object.map._Graph__graph_dict == {} + assert global_object.map._name_iterator_dict == {} def test_add_vertex(self, clear): test_obj = BaseObj("test") - assert len(borg.map._store) == 1 - assert len(borg.map._Graph__graph_dict) == 1 - assert borg.map._name_iterator_dict == {"BaseObj": 0} + assert len(global_object.map._store) == 1 + assert len(global_object.map._Graph__graph_dict) == 1 + assert global_object.map._name_iterator_dict == {"BaseObj": 0} @pytest.mark.parametrize("name", ["test", "test2", "test3"]) def test_clear_fixture(self, name, clear): test_obj= BaseObj(name, unique_name=name) - assert len(borg.map._store) == 1 \ No newline at end of file + assert len(global_object.map._store) == 1 \ No newline at end of file diff --git a/tests/unit_tests/utils/io_tests/test_dict.py b/tests/unit_tests/utils/io_tests/test_dict.py index 5f4b6b2a..40286e27 100644 --- a/tests/unit_tests/utils/io_tests/test_dict.py +++ b/tests/unit_tests/utils/io_tests/test_dict.py @@ -18,7 +18,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict -from easyscience import borg +from easyscience import global_object def recursive_remove(d, remove_keys: list) -> dict: @@ -249,7 +249,7 @@ def test_custom_class_full_decode_with_numpy(): obj = B(Descriptor("a", 1.0), np.array([1.0, 2.0, 3.0])) full_enc = obj.encode(encoder=DictSerializer, full_encode=True) - borg.map._clear() + global_object.map._clear() obj2 = B.decode(full_enc, decoder=DictSerializer) assert obj.name == obj2.name assert obj.a.raw_value == obj2.a.raw_value @@ -270,7 +270,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) - borg.map._clear() + global_object.map._clear() dec = dp_cls.decode(enc, decoder=DictSerializer) for k in data_dict.keys(): @@ -291,7 +291,7 @@ def test_variable_DictSerializer_from_dict(dp_kwargs: dict, dp_cls: Type[Descrip data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=DictSerializer) - borg.map._clear() + global_object.map._clear() dec = dp_cls.from_dict(enc) for k in data_dict.keys(): diff --git a/tests/unit_tests/utils/io_tests/test_json.py b/tests/unit_tests/utils/io_tests/test_json.py index e48787b7..17442c4a 100644 --- a/tests/unit_tests/utils/io_tests/test_json.py +++ b/tests/unit_tests/utils/io_tests/test_json.py @@ -15,7 +15,7 @@ from .test_core import check_dict from .test_core import dp_param_dict from .test_core import skip_dict -from easyscience import borg +from easyscience import global_object def recursive_remove(d, remove_keys: list) -> dict: @@ -179,7 +179,7 @@ def test_variable_DictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descriptor data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonSerializer) - borg.map._clear() + global_object.map._clear() assert 
isinstance(enc, str) dec = obj.decode(enc, decoder=JsonSerializer) @@ -201,6 +201,6 @@ def test_variable_DataDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descri data_dict["raw_value"] = data_dict.pop("value") enc = obj.encode(encoder=JsonDataSerializer) - borg.map._clear() + global_object.map._clear() with pytest.raises(NotImplementedError): dec = obj.decode(enc, decoder=JsonDataSerializer) diff --git a/tests/unit_tests/utils/io_tests/test_xml.py b/tests/unit_tests/utils/io_tests/test_xml.py index b1d35040..562ceae1 100644 --- a/tests/unit_tests/utils/io_tests/test_xml.py +++ b/tests/unit_tests/utils/io_tests/test_xml.py @@ -14,7 +14,7 @@ from .test_core import Descriptor from .test_core import dp_param_dict from .test_core import skip_dict -from easyscience import borg +from easyscience import global_object def recursive_remove(d, remove_keys: list) -> dict: """ @@ -115,7 +115,7 @@ def test_variable_XMLDictSerializer_decode(dp_kwargs: dict, dp_cls: Type[Descrip assert isinstance(enc, str) data_xml = ET.XML(enc) assert data_xml.tag == "data" - borg.map._clear() + global_object.map._clear() dec = dp_cls.decode(enc, decoder=XMLSerializer) for k in data_dict.keys(): From 3d5e8c30fe6ff0e5637696d7b756ebff527b40e1 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Tue, 2 Jul 2024 16:11:19 +0200 Subject: [PATCH 42/57] Restructure sourcecode tree --- src/easyscience/Fitting/Fitting.py | 2 +- src/easyscience/Objects/Groups.py | 2 +- src/easyscience/Objects/Variable.py | 2 +- src/easyscience/Utils/classTools.py | 2 +- src/easyscience/__init__.py | 2 +- src/easyscience/global_object/__init__.py | 4 + .../global_object.py | 13 ++-- .../hugger}/__init__.py | 0 .../hugger/hugger.py} | 0 .../hugger/property.py} | 5 +- .../Logging.py => global_object/logger.py} | 0 .../Graph.py => global_object/map.py} | 78 +++++++++---------- .../undo_redo.py} | 0 tests/unit_tests/Objects/test_graph.py | 10 +-- 14 files changed, 63 insertions(+), 57 deletions(-) create mode 100644 src/easyscience/global_object/__init__.py rename src/easyscience/{Objects => global_object}/global_object.py (83%) rename src/easyscience/{Utils/Hugger => global_object/hugger}/__init__.py (100%) rename src/easyscience/{Utils/Hugger/Hugger.py => global_object/hugger/hugger.py} (100%) rename src/easyscience/{Utils/Hugger/Property.py => global_object/hugger/property.py} (99%) rename src/easyscience/{Utils/Logging.py => global_object/logger.py} (100%) rename src/easyscience/{Objects/Graph.py => global_object/map.py} (81%) rename src/easyscience/{Utils/UndoRedo.py => global_object/undo_redo.py} (100%) diff --git a/src/easyscience/Fitting/Fitting.py b/src/easyscience/Fitting/Fitting.py index 3ffaad77..1496ae93 100644 --- a/src/easyscience/Fitting/Fitting.py +++ b/src/easyscience/Fitting/Fitting.py @@ -19,8 +19,8 @@ import numpy as np import easyscience.Fitting as Fitting -from easyscience import global_object from easyscience import default_fitting_engine +from easyscience import global_object from easyscience.Objects.Groups import BaseCollection _C = TypeVar('_C', bound=ABCMeta) diff --git a/src/easyscience/Objects/Groups.py b/src/easyscience/Objects/Groups.py index 896e9dad..504688e6 100644 --- a/src/easyscience/Objects/Groups.py +++ b/src/easyscience/Objects/Groups.py @@ -17,9 +17,9 @@ from typing import Tuple from typing import Union +from easyscience.global_object.undo_redo import NotarizedDict from easyscience.Objects.ObjectClasses import BasedBase from easyscience.Objects.ObjectClasses import Descriptor -from 
easyscience.Utils.UndoRedo import NotarizedDict if TYPE_CHECKING: from easyscience.Utils.typing import B diff --git a/src/easyscience/Objects/Variable.py b/src/easyscience/Objects/Variable.py index 7676d27d..42c3bedd 100644 --- a/src/easyscience/Objects/Variable.py +++ b/src/easyscience/Objects/Variable.py @@ -31,10 +31,10 @@ from easyscience import pint from easyscience import ureg from easyscience.fitting.Constraints import SelfConstraint +from easyscience.global_object.undo_redo import property_stack_deco from easyscience.Objects.core import ComponentSerializer from easyscience.Utils.classTools import addProp from easyscience.Utils.Exceptions import CoreSetException -from easyscience.Utils.UndoRedo import property_stack_deco if TYPE_CHECKING: from easyscience.Utils.typing import C diff --git a/src/easyscience/Utils/classTools.py b/src/easyscience/Utils/classTools.py index 255d1d13..6e29eae7 100644 --- a/src/easyscience/Utils/classTools.py +++ b/src/easyscience/Utils/classTools.py @@ -12,7 +12,7 @@ from typing import Tuple from easyscience import global_object -from easyscience.Utils.Hugger.Property import LoggedProperty +from easyscience.global_object.hugger.property import LoggedProperty if TYPE_CHECKING: from easyscience.Utils.typing import BV diff --git a/src/easyscience/__init__.py b/src/easyscience/__init__.py index 547ed141..9d6c91e0 100644 --- a/src/easyscience/__init__.py +++ b/src/easyscience/__init__.py @@ -8,7 +8,7 @@ import pint from easyscience.__version__ import __version__ as __version__ -from easyscience.Objects.global_object import GlobalObject +from easyscience.global_object import GlobalObject ureg = pint.UnitRegistry() global_object = GlobalObject() diff --git a/src/easyscience/global_object/__init__.py b/src/easyscience/global_object/__init__.py new file mode 100644 index 00000000..0bea5e61 --- /dev/null +++ b/src/easyscience/global_object/__init__.py @@ -0,0 +1,4 @@ +from .global_object import GlobalObject # noqa: F401 +from .hugger.hugger import ScriptManager # noqa: F401 +from .logger import Logger # noqa: F401 +from .map import Map # noqa: F401 diff --git a/src/easyscience/Objects/global_object.py b/src/easyscience/global_object/global_object.py similarity index 83% rename from src/easyscience/Objects/global_object.py rename to src/easyscience/global_object/global_object.py index f936447e..281115f5 100644 --- a/src/easyscience/Objects/global_object.py +++ b/src/easyscience/global_object/global_object.py @@ -5,10 +5,11 @@ __author__ = "github.com/wardsimon" __version__ = "0.1.0" -from easyscience.Objects.Graph import Graph from easyscience.Utils.classUtils import singleton -from easyscience.Utils.Hugger.Hugger import ScriptManager -from easyscience.Utils.Logging import Logger + +from .hugger.hugger import ScriptManager +from .logger import Logger +from .map import Map @singleton @@ -19,7 +20,7 @@ class GlobalObject: """ __log = Logger() - __map = Graph() + __map = Map() __stack = None __debug = False @@ -33,7 +34,7 @@ def __init__(self): # self.script: ScriptManager = ScriptManager() # Map. 
This is the conduit database between all borg species - self.map: Graph = self.__map + self.map: Map = self.__map def instantiate_stack(self): """ @@ -43,6 +44,6 @@ def instantiate_stack(self): :return: None :rtype: noneType """ - from easyscience.Utils.UndoRedo import UndoStack + from easyscience.global_object.undo_redo import UndoStack self.stack = UndoStack() diff --git a/src/easyscience/Utils/Hugger/__init__.py b/src/easyscience/global_object/hugger/__init__.py similarity index 100% rename from src/easyscience/Utils/Hugger/__init__.py rename to src/easyscience/global_object/hugger/__init__.py diff --git a/src/easyscience/Utils/Hugger/Hugger.py b/src/easyscience/global_object/hugger/hugger.py similarity index 100% rename from src/easyscience/Utils/Hugger/Hugger.py rename to src/easyscience/global_object/hugger/hugger.py diff --git a/src/easyscience/Utils/Hugger/Property.py b/src/easyscience/global_object/hugger/property.py similarity index 99% rename from src/easyscience/Utils/Hugger/Property.py rename to src/easyscience/global_object/hugger/property.py index daad8458..ac0bfcbd 100644 --- a/src/easyscience/Utils/Hugger/Property.py +++ b/src/easyscience/global_object/hugger/property.py @@ -12,8 +12,9 @@ from typing import List from easyscience import global_object -from easyscience.Utils.Hugger.Hugger import PatcherFactory -from easyscience.Utils.Hugger.Hugger import Store + +from .hugger import PatcherFactory +from .hugger import Store class LoggedProperty(property): diff --git a/src/easyscience/Utils/Logging.py b/src/easyscience/global_object/logger.py similarity index 100% rename from src/easyscience/Utils/Logging.py rename to src/easyscience/global_object/logger.py diff --git a/src/easyscience/Objects/Graph.py b/src/easyscience/global_object/map.py similarity index 81% rename from src/easyscience/Objects/Graph.py rename to src/easyscience/global_object/map.py index 734bf147..1f89b130 100644 --- a/src/easyscience/Objects/Graph.py +++ b/src/easyscience/global_object/map.py @@ -20,7 +20,7 @@ def __init__(self, *args, my_type=None, **kwargs): self._type.append(my_type) def __repr__(self) -> str: - s = "Graph entry of type: " + s = "Map entry of type: " if self._type: s += ", ".join(self._type) else: @@ -68,21 +68,21 @@ def is_returned(self) -> bool: return "returned" in self._type -class Graph: +class Map: def __init__(self): # A dictionary of object names and their corresponding objects self._store = weakref.WeakValueDictionary() # A dict with object names as keys and a list of their object types as values, with weak references - self.__graph_dict = {} + self.__type_dict = {} # A dictionary of class names and their corresponding default name_generator iterators self._name_iterator_dict = {} def vertices(self) -> List[str]: - """returns the vertices of a graph""" + """returns the vertices of a map""" return list(self._store.keys()) def edges(self): - """returns the edges of a graph""" + """returns the edges of a map""" return self.__generate_edges() @property @@ -104,7 +104,7 @@ def returned_objs(self) -> List[str]: def _nested_get(self, obj_type: str) -> List[str]: """Access a nested object in root by key sequence.""" extracted_list = [] - for key, item in self.__graph_dict.items(): + for key, item in self.__type_dict.items(): if obj_type in item.type: extracted_list.append(key) return extracted_list @@ -120,7 +120,7 @@ def _get_name_iterator(self, class_name: str) -> int: def get_item_by_key(self, item_id: str) -> object: if item_id in self._store.keys(): return self._store[item_id] - 
raise ValueError("Item not in graph.") + raise ValueError("Item not in map.") def is_known(self, vertex: object) -> bool: # All objects should have a 'unique_name' attribute @@ -128,48 +128,48 @@ def is_known(self, vertex: object) -> bool: def find_type(self, vertex: object) -> List[str]: if self.is_known(vertex): - return self.__graph_dict[vertex.unique_name].type + return self.__type_dict[vertex.unique_name].type def reset_type(self, obj, default_type: str): - if obj.unique_name in self.__graph_dict.keys(): - self.__graph_dict[obj.unique_name].reset_type(default_type) + if obj.unique_name in self.__type_dict.keys(): + self.__type_dict[obj.unique_name].reset_type(default_type) def change_type(self, obj, new_type: str): - if obj.unique_name in self.__graph_dict.keys(): - self.__graph_dict[obj.unique_name].type = new_type + if obj.unique_name in self.__type_dict.keys(): + self.__type_dict[obj.unique_name].type = new_type def add_vertex(self, obj: object, obj_type: str = None): name = obj.unique_name if name in self._store.keys(): raise ValueError(f"Object name {name} already exists in the graph.") self._store[name] = obj - self.__graph_dict[name] = _EntryList() # Add objects type to the list of types - self.__graph_dict[name].finalizer = weakref.finalize( + self.__type_dict[name] = _EntryList() # Add objects type to the list of types + self.__type_dict[name].finalizer = weakref.finalize( self._store[name], self.prune, name ) - self.__graph_dict[name].type = obj_type + self.__type_dict[name].type = obj_type def add_edge(self, start_obj: object, end_obj: object): - if start_obj.unique_name in self.__graph_dict.keys(): - self.__graph_dict[start_obj.unique_name].append(end_obj.unique_name) + if start_obj.unique_name in self.__type_dict.keys(): + self.__type_dict[start_obj.unique_name].append(end_obj.unique_name) else: - raise AttributeError("Start object not in graph.") + raise AttributeError("Start object not in map.") def get_edges(self, start_obj) -> List[str]: - if start_obj.unique_name in self.__graph_dict.keys(): - return list(self.__graph_dict[start_obj.unique_name]) + if start_obj.unique_name in self.__type_dict.keys(): + return list(self.__type_dict[start_obj.unique_name]) else: raise AttributeError def __generate_edges(self) -> list: """A static method generating the edges of the - graph "graph". Edges are represented as sets + map. 
Edges are represented as sets with one (a loop back to the vertex) or two vertices """ edges = [] - for vertex in self.__graph_dict: - for neighbour in self.__graph_dict[vertex]: + for vertex in self.__type_dict: + for neighbour in self.__type_dict[vertex]: if {neighbour, vertex} not in edges: edges.append({vertex, neighbour}) return edges @@ -181,19 +181,19 @@ def prune_vertex_from_edge(self, parent_obj, child_obj): vertex2 = child_obj.unique_name if ( - vertex1 in self.__graph_dict.keys() - and vertex2 in self.__graph_dict[vertex1] + vertex1 in self.__type_dict.keys() + and vertex2 in self.__type_dict[vertex1] ): - del self.__graph_dict[vertex1][self.__graph_dict[vertex1].index(vertex2)] + del self.__type_dict[vertex1][self.__type_dict[vertex1].index(vertex2)] def prune(self, key: str): - if key in self.__graph_dict.keys(): - del self.__graph_dict[key] + if key in self.__type_dict.keys(): + del self.__type_dict[key] del self._store[key] def find_isolated_vertices(self) -> list: """returns a list of isolated vertices.""" - graph = self.__graph_dict + graph = self.__type_dict isolated = [] for vertex in graph: print(isolated, vertex) @@ -203,7 +203,7 @@ def find_isolated_vertices(self) -> list: def find_path(self, start_obj, end_obj, path=[]) -> list: """find a path from start_vertex to end_vertex - in graph""" + in map""" try: start_vertex = start_obj.unique_name @@ -212,7 +212,7 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: start_vertex = start_obj end_vertex = end_obj - graph = self.__graph_dict + graph = self.__type_dict path = path + [start_vertex] if start_vertex == end_vertex: return path @@ -227,12 +227,12 @@ def find_path(self, start_obj, end_obj, path=[]) -> list: def find_all_paths(self, start_obj, end_obj, path=[]) -> list: """find all paths from start_vertex to - end_vertex in graph""" + end_vertex in map""" start_vertex = start_obj.unique_name end_vertex = end_obj.unique_name - graph = self.__graph_dict + graph = self.__type_dict path = path + [start_vertex] if start_vertex == end_vertex: return [path] @@ -263,7 +263,7 @@ def reverse_route(self, end_obj, start_obj=None) -> List: optimum_path = [] if start_obj is None: # We now have to find where to begin..... - for possible_start, vertices in self.__graph_dict.items(): + for possible_start, vertices in self.__type_dict.items(): if end_vertex in vertices: temp_path = self.find_path(possible_start, end_vertex) if len(temp_path) < path_length: @@ -275,10 +275,10 @@ def reverse_route(self, end_obj, start_obj=None) -> List: return optimum_path def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: - """determines if the graph is connected""" + """determines if the map is connected""" if vertices_encountered is None: vertices_encountered = set() - graph = self.__graph_dict + graph = self.__type_dict vertices = list(graph.keys()) if not start_vertex: # chose a vertex from graph as a starting point @@ -295,11 +295,11 @@ def is_connected(self, vertices_encountered=None, start_vertex=None) -> bool: return False def _clear(self): - """ Reset the graph to an empty state. """ + """ Reset the map to an empty state. """ self._store = weakref.WeakValueDictionary() - self.__graph_dict = {} + self.__type_dict = {} self._name_iterator_dict = {} def __repr__(self) -> str: - return f"Graph object of {len(self._store)} vertices." + return f"Map object of {len(self._store)} vertices." 
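The renamed `Map` above is a registry pattern worth calling out: objects are stored in a `weakref.WeakValueDictionary` keyed by their `unique_name`, and each `add_vertex` attaches a `weakref.finalize` callback so the type bookkeeping in `__type_dict` is pruned as soon as the referenced object is garbage collected. The following minimal sketch shows that pattern in isolation; `TinyRegistry` and `Node` are illustrative stand-ins invented for this example, not EasyScience classes.

import gc
import weakref


class Node:
    """A minimal object carrying the `unique_name` attribute the registry keys on."""

    def __init__(self, unique_name: str):
        self.unique_name = unique_name


class TinyRegistry:
    """Stripped-down illustration of a weakref-backed object map."""

    def __init__(self):
        # Objects are held weakly: the registry alone never keeps them alive.
        self._store = weakref.WeakValueDictionary()
        self._types = {}

    def add_vertex(self, obj, obj_type=None):
        name = obj.unique_name
        if name in self._store:
            raise ValueError(f"Object name {name} already exists in the registry.")
        self._store[name] = obj
        self._types[name] = [] if obj_type is None else [obj_type]
        # Drop the bookkeeping entry once the object itself is garbage collected.
        weakref.finalize(obj, self._prune, name)

    def _prune(self, name):
        self._types.pop(name, None)

    def get_item_by_key(self, name):
        try:
            return self._store[name]
        except KeyError:
            raise ValueError("Item not in registry.") from None


registry = TinyRegistry()
node = Node("Node_0")
registry.add_vertex(node, obj_type="created")
assert registry.get_item_by_key("Node_0") is node

del node
gc.collect()  # on CPython the weak entry is typically gone even without an explicit collect
assert "Node_0" not in registry._types

The `test_weakref` case added to `tests/unit_tests/global_object/test_map.py` later in this series exercises exactly this behaviour: deleting the only strong reference and collecting garbage empties both the weak store and the type dictionary.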
diff --git a/src/easyscience/Utils/UndoRedo.py b/src/easyscience/global_object/undo_redo.py similarity index 100% rename from src/easyscience/Utils/UndoRedo.py rename to src/easyscience/global_object/undo_redo.py diff --git a/tests/unit_tests/Objects/test_graph.py b/tests/unit_tests/Objects/test_graph.py index 8149d765..8048e823 100644 --- a/tests/unit_tests/Objects/test_graph.py +++ b/tests/unit_tests/Objects/test_graph.py @@ -2,13 +2,13 @@ # SPDX-License-Identifier: BSD-3-Clause # © 2021-2023 Contributors to the EasyScience project Date: Wed, 3 Jul 2024 11:54:23 +0200 Subject: [PATCH 43/57] restructure test folder --- tests/integration_tests/__init__.py | 6 ------ .../{Objects/test_graph.py => global_object/test_map.py} | 0 .../global_object/test_undo_redo.py} | 0 3 files changed, 6 deletions(-) delete mode 100644 tests/integration_tests/__init__.py rename tests/unit_tests/{Objects/test_graph.py => global_object/test_map.py} (100%) rename tests/{integration_tests/test_undoRedo.py => unit_tests/global_object/test_undo_redo.py} (100%) diff --git a/tests/integration_tests/__init__.py b/tests/integration_tests/__init__.py deleted file mode 100644 index 22e236a6..00000000 --- a/tests/integration_tests/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# SPDX-FileCopyrightText: 2023 EasyScience contributors -# SPDX-License-Identifier: BSD-3-Clause -# © 2021-2023 Contributors to the EasyScience project Date: Wed, 3 Jul 2024 12:18:24 +0200 Subject: [PATCH 44/57] more tests --- tests/unit_tests/global_object/test_map.py | 36 ++++++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/tests/unit_tests/global_object/test_map.py b/tests/unit_tests/global_object/test_map.py index 8048e823..f7982ff8 100644 --- a/tests/unit_tests/global_object/test_map.py +++ b/tests/unit_tests/global_object/test_map.py @@ -6,6 +6,7 @@ from easyscience.Objects.Variable import Parameter from easyscience.Objects.ObjectClasses import BaseObj import pytest +import gc from easyscience import global_object class TestMap: @@ -14,7 +15,7 @@ def clear(self): global_object.map._clear() def test_clear(self, clear): - test_obj = BaseObj("test") + test_obj = BaseObj(name="test") assert len(global_object.map._store) == 1 assert len(global_object.map._Map__type_dict) == 1 assert global_object.map._name_iterator_dict == {"BaseObj": 0} @@ -24,12 +25,35 @@ def test_clear(self, clear): assert global_object.map._name_iterator_dict == {} def test_add_vertex(self, clear): - test_obj = BaseObj("test") + test_obj = BaseObj(name="test") assert len(global_object.map._store) == 1 assert len(global_object.map._Map__type_dict) == 1 assert global_object.map._name_iterator_dict == {"BaseObj": 0} - @pytest.mark.parametrize("name", ["test", "test2", "test3"]) - def test_clear_fixture(self, name, clear): - test_obj= BaseObj(name, unique_name=name) - assert len(global_object.map._store) == 1 \ No newline at end of file + def test_weakref(self, clear): + test_obj = BaseObj(name="test") + assert len(global_object.map._store) == 1 + assert len(global_object.map._Map__type_dict) == 1 + del test_obj + gc.collect() + assert len(global_object.map._store) == 0 + assert len(global_object.map._Map__type_dict) == 0 + + def test_vertices(self, clear): + test_obj = BaseObj(name="test") + test_obj2 = Parameter(value=2.0, name="test2") + assert global_object.map.vertices() == ["BaseObj_0", "Parameter_0"] + + def test_get_item_by_key(self, clear): + test_obj = BaseObj(name="test") + test_obj2 = Parameter(value=2.0, name="test2") + assert 
global_object.map.get_item_by_key(test_obj.unique_name) == test_obj + assert global_object.map.get_item_by_key(test_obj2.unique_name) == test_obj2 + + def test_get_name_iterator(self, clear): + assert global_object.map._get_name_iterator("BaseObj") == 0 + assert global_object.map._get_name_iterator("Parameter") == 0 + test_obj = BaseObj(name="test") + test_obj2 = Parameter(value=2.0, name="test2") + assert global_object.map._get_name_iterator("BaseObj") == 2 + assert global_object.map._get_name_iterator("Parameter") == 2 \ No newline at end of file From 7c64c6f92206d146b809faf50854c93d4f4653af Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Thu, 4 Jul 2024 10:30:23 +0200 Subject: [PATCH 45/57] test for identical unique names --- src/easyscience/Objects/ObjectClasses.py | 3 ++- tests/unit_tests/global_object/test_map.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/easyscience/Objects/ObjectClasses.py b/src/easyscience/Objects/ObjectClasses.py index 43bbb6a7..6379b0e4 100644 --- a/src/easyscience/Objects/ObjectClasses.py +++ b/src/easyscience/Objects/ObjectClasses.py @@ -241,6 +241,7 @@ class BaseObj(BasedBase): def __init__( self, name: str, + unique_name: Optional[str] = None, *args: Optional[BV], **kwargs: Optional[BV], ): @@ -251,7 +252,7 @@ def __init__( :param args: Any arguments? :param kwargs: Fields which this class should contain """ - super(BaseObj, self).__init__(name) + super(BaseObj, self).__init__(name=name, unique_name=unique_name) # If Parameter or Descriptor is given as arguments... for arg in args: if issubclass(type(arg), (BaseObj, Descriptor, DescriptorBase)): diff --git a/tests/unit_tests/global_object/test_map.py b/tests/unit_tests/global_object/test_map.py index f7982ff8..f751e7b5 100644 --- a/tests/unit_tests/global_object/test_map.py +++ b/tests/unit_tests/global_object/test_map.py @@ -56,4 +56,13 @@ def test_get_name_iterator(self, clear): test_obj = BaseObj(name="test") test_obj2 = Parameter(value=2.0, name="test2") assert global_object.map._get_name_iterator("BaseObj") == 2 - assert global_object.map._get_name_iterator("Parameter") == 2 \ No newline at end of file + assert global_object.map._get_name_iterator("Parameter") == 2 + + @pytest.mark.parametrize("cls, kwargs", [(BaseObj, {}), (Parameter, {"value": 2.0})]) + def test_identical_unique_names_exception(self, clear, cls, kwargs): + test_obj = cls(name="test", unique_name="test", **kwargs) + with pytest.raises(ValueError): + test_obj2 = cls(name="test2", unique_name="test", **kwargs) + + # test unique_name change + From 0b625a66f60d48745111713bf7a3f098ab33250f Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Fri, 5 Jul 2024 13:25:04 +0200 Subject: [PATCH 46/57] more tests --- tests/unit_tests/Objects/test_BaseObj.py | 4 +- tests/unit_tests/global_object/test_map.py | 48 +++++++++++++--------- 2 files changed, 31 insertions(+), 21 deletions(-) diff --git a/tests/unit_tests/Objects/test_BaseObj.py b/tests/unit_tests/Objects/test_BaseObj.py index cf71ccb8..f552df12 100644 --- a/tests/unit_tests/Objects/test_BaseObj.py +++ b/tests/unit_tests/Objects/test_BaseObj.py @@ -58,7 +58,7 @@ def not_raises( ([], ["par1"]), (["par1"], []), (["par1"], ["par2"]), - (["par1", "des1"], ["par2", "des2"]), + (["par1", "des1"], ["par2", "des2"]), ], ) def test_baseobj_create(setup_pars: dict, a: List[str], kw: List[str]): @@ -69,7 +69,7 @@ def test_baseobj_create(setup_pars: dict, a: List[str], kw: List[str]): kwargs = {} for key in kw: kwargs[key] = setup_pars[key] - base = 
BaseObj(name, *args, **kwargs) + base = BaseObj(name, None, *args, **kwargs) assert base.name == name for key in a: item = getattr(base, setup_pars[key].name) diff --git a/tests/unit_tests/global_object/test_map.py b/tests/unit_tests/global_object/test_map.py index f751e7b5..fdb29008 100644 --- a/tests/unit_tests/global_object/test_map.py +++ b/tests/unit_tests/global_object/test_map.py @@ -14,22 +14,28 @@ class TestMap: def clear(self): global_object.map._clear() - def test_clear(self, clear): - test_obj = BaseObj(name="test") + @pytest.fixture + def base_object(self): + return BaseObj(name="test") + + @pytest.fixture + def parameter_object(self): + return Parameter(value=2.0, name="test2") + + def test_add_vertex(self, clear, base_object, parameter_object): + assert len(global_object.map._store) == 2 + assert len(global_object.map._Map__type_dict) == 2 + assert len(global_object.map._name_iterator_dict) == 2 + + def test_clear(self, clear, base_object): assert len(global_object.map._store) == 1 assert len(global_object.map._Map__type_dict) == 1 - assert global_object.map._name_iterator_dict == {"BaseObj": 0} + assert len(global_object.map._name_iterator_dict) == 1 global_object.map._clear() assert len(global_object.map._store) == 0 assert global_object.map._Map__type_dict == {} assert global_object.map._name_iterator_dict == {} - def test_add_vertex(self, clear): - test_obj = BaseObj(name="test") - assert len(global_object.map._store) == 1 - assert len(global_object.map._Map__type_dict) == 1 - assert global_object.map._name_iterator_dict == {"BaseObj": 0} - def test_weakref(self, clear): test_obj = BaseObj(name="test") assert len(global_object.map._store) == 1 @@ -39,16 +45,12 @@ def test_weakref(self, clear): assert len(global_object.map._store) == 0 assert len(global_object.map._Map__type_dict) == 0 - def test_vertices(self, clear): - test_obj = BaseObj(name="test") - test_obj2 = Parameter(value=2.0, name="test2") - assert global_object.map.vertices() == ["BaseObj_0", "Parameter_0"] + def test_vertices(self, clear, base_object, parameter_object): + assert global_object.map.vertices() == [base_object.unique_name, parameter_object.unique_name] - def test_get_item_by_key(self, clear): - test_obj = BaseObj(name="test") - test_obj2 = Parameter(value=2.0, name="test2") - assert global_object.map.get_item_by_key(test_obj.unique_name) == test_obj - assert global_object.map.get_item_by_key(test_obj2.unique_name) == test_obj2 + def test_get_item_by_key(self, clear, base_object, parameter_object): + assert global_object.map.get_item_by_key(base_object.unique_name) == base_object + assert global_object.map.get_item_by_key(parameter_object.unique_name) == parameter_object def test_get_name_iterator(self, clear): assert global_object.map._get_name_iterator("BaseObj") == 0 @@ -64,5 +66,13 @@ def test_identical_unique_names_exception(self, clear, cls, kwargs): with pytest.raises(ValueError): test_obj2 = cls(name="test2", unique_name="test", **kwargs) - # test unique_name change + def test_unique_name_change_still_in_map(self, clear, base_object, parameter_object): + assert global_object.map.get_item_by_key("BaseObj_0") == base_object + assert global_object.map.get_item_by_key("Parameter_0") == parameter_object + base_object.unique_name = "test3" + parameter_object.unique_name = "test4" + assert global_object.map.get_item_by_key("BaseObj_0") == base_object + assert global_object.map.get_item_by_key("Parameter_0") == parameter_object + assert global_object.map.get_item_by_key("test3") == base_object + 
assert global_object.map.get_item_by_key("test4") == parameter_object From 663c08ef6d02bf48eeb23147fa68c3bc33b64a69 Mon Sep 17 00:00:00 2001 From: Christian Vedel Date: Mon, 8 Jul 2024 09:29:59 +0200 Subject: [PATCH 47/57] Rebase on develop --- examples_old/dataset_examples.ipynb | 6 +- examples_old/example6_broken.py | 6 +- resources/images/ec_logo_wfont.svg | 4 +- src/easyscience/Fitting/Fitting.py | 473 --------------- .../Objects/new_variable/descriptor_base.py | 43 +- .../Objects/new_variable/descriptor_bool.py | 4 +- .../Objects/new_variable/descriptor_number.py | 4 +- .../Objects/new_variable/descriptor_str.py | 4 +- .../Objects/new_variable/parameter.py | 24 +- src/easyscience/Objects/virtual.py | 2 +- .../fitting/minimizers/minimizer_base.py | 62 -- .../fitting/minimizers/minimizer_bumps.py | 1 - .../fitting/minimizers/minimizer_dfo.py | 1 - .../fitting/minimizers/minimizer_lmfit.py | 1 - src/easyscience/fitting/minimizers/utils.py | 17 - .../global_object/global_object.py | 2 +- .../integration_tests/Fitting/test_fitter.py | 4 +- .../Fitting/test_fitter_legacy_parameter.py | 4 +- tests/unit_tests/Fitting/test_fitting.py | 573 ------------------ .../new_variable/test_descriptor_base.py | 14 +- .../test_parameter_from_legacy.py | 4 +- 21 files changed, 83 insertions(+), 1170 deletions(-) delete mode 100644 src/easyscience/Fitting/Fitting.py delete mode 100644 tests/unit_tests/Fitting/test_fitting.py diff --git a/examples_old/dataset_examples.ipynb b/examples_old/dataset_examples.ipynb index 96b0cd19..48f206c2 100644 --- a/examples_old/dataset_examples.ipynb +++ b/examples_old/dataset_examples.ipynb @@ -625,7 +625,7 @@ "\n", "