diff --git a/doc/usersguide/example.rst b/doc/usersguide/example.rst index 54e761243..acd3251d2 100644 --- a/doc/usersguide/example.rst +++ b/doc/usersguide/example.rst @@ -50,6 +50,8 @@ Intermediate Concepts and VisTrails Packages controlflow cfassistant + list_handling + streaming parallelflow database example_webservices diff --git a/doc/usersguide/figures/list_handling/list-combine-workflow.png b/doc/usersguide/figures/list_handling/list-combine-workflow.png new file mode 100644 index 000000000..2c117faca Binary files /dev/null and b/doc/usersguide/figures/list_handling/list-combine-workflow.png differ diff --git a/doc/usersguide/figures/list_handling/list-combine.png b/doc/usersguide/figures/list_handling/list-combine.png new file mode 100644 index 000000000..3f99e910d Binary files /dev/null and b/doc/usersguide/figures/list_handling/list-combine.png differ diff --git a/doc/usersguide/list_handling.rst b/doc/usersguide/list_handling.rst new file mode 100644 index 000000000..37b42e724 --- /dev/null +++ b/doc/usersguide/list_handling.rst @@ -0,0 +1,94 @@ +.. _chap-list_handling: + +************************** +List Handling in VisTrails +************************** + +VisTrails supports passing typed lists between modules. Ports on modules have a +depth parameter specifying the list depth it expects. 0 means no list, 1 is a +list, 2 is a list of lists etc. Port depth can be specified either by module +creators or in a ``PythonSource`` or similar module. + +Iterating over lists +==================== + +Passing a list to a module that does not support lists will cause that module +to be executed once for each element in the list. When passing lists on +multiple input ports, the inputs will be combined. The default combination is +cartesian product, where each element in a list is combined with each element +in another list. This combination can be changed by selecting "Looping Options" +in the module menu. 
The options are ``Cartesian``, ``Pairwise`` (where the +elements are combined pairwise), and ``Custom``. ``Custom`` gives you complete +control over how inputs are combined and allows you to use both +pairwise/cartesian combiners as well as reordering them. The output of an +iterated module will be an ordered list with the individual results of the +module execution. This will cause modules downstream to also be iterated over, +unless they accept a list as input. Iterated modules will have duplicated +connections to show that they are being iterated over. A list of lists will +have the connection triplicated etc. + +.. topic:: Try it Now! + + Let's create a simple example showing how to combine strings. First we will + create a module that generates lists of strings. Create a new workflow and + add a ``PythonSource`` module. Give it three output ports named ``s1``, + ``s2``, ``s3`` of type ``String`` and set their list depth to 1. Enter this + code: + +.. code-block:: python + + s1 = ['A', 'B', 'C'] + s2 = ['+', '-', '*'] + s3 = ['1', '2', '3'] + +.. topic:: Next Step! + + Add a ``ConcatenateString`` module, connect ``s1->str1``, ``s2->str2``, + ``s3->str3``. Notice how the connections going into ``ConcatenateString`` are + duplicated. This indicates that ``ConcatenateString`` will iterate over the + list inputs. Add a ``StandardOutput`` module and connect + ``ConcatenateString.value`` to ``StandardOutput.value``. This connection + will be duplicated at both ends, indicating they will both be iterated over. + Your workflow should now look like Figure + :ref:`fig-list_handling-list-combine-workflow`. + +.. _fig-list_handling-list-combine-workflow: + +.. figure:: figures/list_handling/list-combine-workflow.png + :align: center + :width: 1.8in + + The complete workflow + +.. topic:: Next Step! + + By default ``ConcatenateString`` will combine the inputs using cartesian + product ``["A+1", "A+2", "A+3", "A-1", ...]``. Let's change this. 
Go to + Module ``Menu->Looping Options`` and click custom. Right click in the port + list and add a pairwise product. Rearrange the ports so that it looks like + Figure :ref:`fig-list_handling-list-combine`. + +.. _fig-list_handling-list-combine: + +.. figure:: figures/list_handling/list-combine.png + :align: center + + A custom list combiner + +.. topic:: Finally: + + ``str2`` and ``str3`` will now be combined pairwise and then combined with + ``str1`` using cartesian product. Open the vistrails console and execute the + workflow. You should see this output: :vtl:`(Open result) ` + +.. code-block:: python + + A+1 + A-2 + A*3 + B+1 + B-2 + B*3 + C+1 + C-2 + C*3 diff --git a/doc/usersguide/streaming.rst b/doc/usersguide/streaming.rst new file mode 100644 index 000000000..987ee7a5e --- /dev/null +++ b/doc/usersguide/streaming.rst @@ -0,0 +1,94 @@ +.. _chap-streaming: + +********************** +Streaming in VisTrails +********************** + +Streaming data may be useful for a number of reasons, such as to incrementally +update a visualization, or to process more data than fits into memory. +VisTrails supports streaming data through the workflow. By implementing modules +that support streaming, data items will be passed through the whole workflow +one at a time. + +Using Streaming +=============== + +Streaming is similar to list handling (see Chapter +:ref:`chap-list_handling`). Modules that create streams should output a port +with list depth 1. Downstream modules that do not accept lists will be executed +once for each item in the stream. Modules with multiple input streams will +combine them pairwise. For this reason the input streams should contain the +same number of items (or be unlimited). + +Modules accepting a type with list depth 1, but do not support streaming, +will convert input streams to lists and execute after the streaming has ended. + +.. topic:: Try it Now! 
+ + Let's use PythonSources to create a simple example that incrementally sums up + a sequence of numbers. First we will create a module that streams the + natural numbers up to some value. Create a new workflow and add a + ``PythonSource`` module. Give it an input port named ``inputs`` of type + Integer, which will specify the maximum number to stream, and an output port + named ``out`` of type ``Integer`` with list depth 1, which will be the output + stream. An output stream can be created by using + ``self.set_streaming_output``, which takes the port name, an iterator object, + and an optional length of the input items. To create an integer iterator we + can use xrange. Add this to the PythonSource: + +.. code-block:: python + + self.set_streaming_output('out', + xrange(inputs).__iter__(), + inputs) + +.. topic:: Next Step! + + Now let's create a module that captures the items in the stream. Add a second + ``PythonSource`` module below the first one. Give it an input port named + ``integerStream`` of type Integer and list depth 1 that will be our input + stream. An input stream can be captured by adding the magic string + ``#STREAMING`` to the PythonSource code and calling ``self.set_streaming`` + with a generator method as argument. The generator method should take the + module as an input. It should first initialize its value, in our case set + ``intsum=0``. Then it should receive the inputs in a loop ending with yield. + In each iteration the module will be updated to contain a new input in the + stream. Similar to a normal module, the loop should: + + 1. get inputs + 2. compute outputs + 3. set outputs + 4. call ``yield`` + + Below is the complete example. Add it to the PythonSource. + +.. code-block:: python + + #STREAMING - This tag is magic, do not change. + + def generator(module): + intsum = 0 + while 1: + i = module.get_input('integerStream') + intsum += i + print "Sum so far:", intsum + yield + + self.set_streaming(generator) + +.. 
topic:: Finally: + + Connect the two PythonSource's, set ``inputs`` to 100 in the first + PythonSource, open the vistrails console and execute. See how the output is + printed to the console while the stream runs and how the progress of the + modules increase. The output should look like this: :vtl:`(open in vistrails) + ` + +.. code-block:: python + + Sum so far: 0 + Sum so far: 1 + Sum so far: 3 + ... + Sum so far: 4851 + Sum so far: 4950 diff --git a/doc/usersguide/vtl/list-handling.vtl b/doc/usersguide/vtl/list-handling.vtl new file mode 100644 index 000000000..0b2444944 --- /dev/null +++ b/doc/usersguide/vtl/list-handling.vtl @@ -0,0 +1 @@ + diff --git a/doc/usersguide/vtl/streaming.vtl b/doc/usersguide/vtl/streaming.vtl new file mode 100644 index 000000000..09d5d54fb --- /dev/null +++ b/doc/usersguide/vtl/streaming.vtl @@ -0,0 +1 @@ + diff --git a/examples/list-handling.vt b/examples/list-handling.vt new file mode 100644 index 000000000..fb58d142a Binary files /dev/null and b/examples/list-handling.vt differ diff --git a/examples/streaming.vt b/examples/streaming.vt new file mode 100644 index 000000000..8e7c2f2d0 Binary files /dev/null and b/examples/streaming.vt differ diff --git a/vistrails/core/cache/hasher.py b/vistrails/core/cache/hasher.py index f6157bdd3..7225641e8 100644 --- a/vistrails/core/cache/hasher.py +++ b/vistrails/core/cache/hasher.py @@ -74,6 +74,14 @@ def function_signature(function, constant_hasher_map={}): constant_hasher_map)) return hasher.digest() + @staticmethod + def control_param_signature(control_param, constant_hasher_map={}): + hasher = sha_hash() + u = hasher.update + u(control_param.name) + u(control_param.value) + return hasher.digest() + @staticmethod def connection_signature(c): hasher = sha_hash() @@ -104,7 +112,10 @@ def module_signature(obj, constant_hasher_map={}): u(obj.module_descriptor.namespace or '') u(obj.module_descriptor.package_version or '') u(obj.module_descriptor.version or '') - u(hash_list(obj.functions, 
Hasher.function_signature, constant_hasher_map)) + u(hash_list(obj.functions, Hasher.function_signature, + constant_hasher_map)) + u(hash_list(obj.control_parameters, Hasher.control_param_signature, + constant_hasher_map)) return hasher.digest() @staticmethod diff --git a/vistrails/core/db/io.py b/vistrails/core/db/io.py index 589e5c68d..e3ac402e6 100644 --- a/vistrails/core/db/io.py +++ b/vistrails/core/db/io.py @@ -125,12 +125,14 @@ def get_workflow_diff(vt_pair_1, vt_pair_2): """ from vistrails.core.vistrail.pipeline import Pipeline - (v1, v2, pairs, heuristic_pairs, v1_only, v2_only, param_changes, \ - _, _, _, _) = \ - vistrails.db.services.vistrail.getWorkflowDiff(vt_pair_1, vt_pair_2, True) + (v1, v2, pairs, heuristic_pairs, v1_only, v2_only, param_changes, + cparam_changes, annot_changes, _, _, _, _) = \ + vistrails.db.services.vistrail.getWorkflowDiff(vt_pair_1, vt_pair_2, + True) Pipeline.convert(v1) Pipeline.convert(v2) - return (v1, v2, pairs, heuristic_pairs, v1_only, v2_only, param_changes) + return (v1, v2, pairs, heuristic_pairs, v1_only, v2_only, param_changes, + cparam_changes, annot_changes) def get_workflow_diff_with_connections(vt_pair_1, vt_pair_2): """get_workflow_diff_with_connections @@ -139,13 +141,15 @@ def get_workflow_diff_with_connections(vt_pair_1, vt_pair_2): """ from vistrails.core.vistrail.pipeline import Pipeline - (v1, v2, m_pairs, m_heuristic, v1_only, v2_only, param_changes, \ - c_pairs, c_heuristic, c1_only, c2_only) = \ - vistrails.db.services.vistrail.getWorkflowDiff(vt_pair_1, vt_pair_2, False) + (v1, v2, m_pairs, m_heuristic, v1_only, v2_only, param_changes, + cparam_changes, annot_changes, c_pairs, c_heuristic, c1_only, c2_only) =\ + vistrails.db.services.vistrail.getWorkflowDiff(vt_pair_1, vt_pair_2, + False) Pipeline.convert(v1) Pipeline.convert(v2) - return (v1, v2, m_pairs, m_heustric, v1_only, v2_only, param_changes, - c_pairs, c_heuristic, c1_only, c2_only) + return (v1, v2, m_pairs, m_heuristic, v1_only, v2_only, 
param_changes, + cparam_changes, annot_changes, c_pairs, c_heuristic, c1_only, + c2_only) def getPathAsAction(vt, v1, v2, do_copy=False): a = vistrails.db.services.vistrail.getPathAsAction(vt, v1, v2, do_copy) diff --git a/vistrails/core/interpreter/cached.py b/vistrails/core/interpreter/cached.py index af63936eb..a089a1575 100644 --- a/vistrails/core/interpreter/cached.py +++ b/vistrails/core/interpreter/cached.py @@ -46,7 +46,8 @@ from vistrails.core.interpreter.job import JobMonitor import vistrails.core.interpreter.utils from vistrails.core.log.controller import DummyLogController -from vistrails.core.modules.basic_modules import identifier as basic_pkg +from vistrails.core.modules.basic_modules import identifier as basic_pkg, \ + Iterator from vistrails.core.modules.module_registry import get_module_registry from vistrails.core.modules.vistrails_module import ModuleBreakpoint, \ ModuleConnector, ModuleError, ModuleErrors, ModuleHadError, \ @@ -134,7 +135,7 @@ def _handle_suspended(self, obj, error): error.name = name # if signature is not set we use the module identifier if not error.signature: - error.signature = i + error.signature = obj.signature jm.addParent(error) def end_update(self, obj, error=None, errorTrace=None, @@ -162,7 +163,8 @@ def end_update(self, obj, error=None, errorTrace=None, if i in self.ids: self.ids.remove(i) self.view.set_execution_progress( - 1.0 - (len(self.ids) * 1.0 / self.nb_modules)) + 1.0 - ((len(self.ids) + len(Iterator.generators)) * 1.0 / + (self.nb_modules + len(Iterator.generators)))) msg = '' if error is None else error.msg self.log.finish_execution(obj, msg, errorTrace, @@ -210,6 +212,7 @@ def create(self): self._objects = {} self._executed = {} self.filePool = self._file_pool + self._streams = [] def clear(self): self._file_pool.cleanup() @@ -340,6 +343,7 @@ def create_constant(param, module): persistent_id = tmp_to_persistent_module_map[i] module = self._persistent_pipeline.modules[persistent_id] obj = 
self._objects[persistent_id] = module.summon() + obj.list_depth = module.list_depth obj.interpreter = self obj.id = persistent_id obj.is_breakpoint = module.is_breakpoint @@ -487,6 +491,9 @@ def make_change_parameter(obj): persistent_sinks = [tmp_id_to_module_map[sink] for sink in pipeline.graph.sinks()] + self._streams.append(Iterator.generators) + Iterator.generators = [] + # Update new sinks for obj in persistent_sinks: abort = False @@ -519,6 +526,44 @@ def make_change_parameter(obj): if stop_on_error or abort: break + # execute all generators until inputs are exhausted + # this makes sure branching and multiple sinks are executed correctly + if not logging_obj.errors and not logging_obj.suspended and \ + Iterator.generators: + result = True + while result is not None: + for g in Iterator.generators: + abort = False + try: + result = g.next() + continue + except ModuleWasSuspended: + continue + except ModuleHadError: + pass + except AbortExecution: + break + except ModuleSuspended, ms: + ms.module.logging.end_update(ms.module, ms, + was_suspended=True) + continue + except ModuleErrors, mes: + for me in mes.module_errors: + me.module.logging.end_update(me.module, me) + logging_obj.signalError(me.module, me) + abort = abort or me.abort + except ModuleError, me: + me.module.logging.end_update(me.module, me, me.errorTrace) + logging_obj.signalError(me.module, me) + abort = me.abort + except ModuleBreakpoint, mb: + mb.module.logging.end_update(mb.module) + logging_obj.signalError(mb.module, mb) + abort = True + if stop_on_error or abort: + break + Iterator.generators = self._streams.pop() + if self.done_update_hook: self.done_update_hook(self._persistent_pipeline, self._objects) @@ -644,9 +689,9 @@ def fetch(name, default): if len(kwargs) > 0: raise VistrailsInternalError('Wrong parameters passed ' 'to execute: %s' % kwargs) - self.clean_non_cacheable_modules() + # if controller is not None: # vistrail = controller.vistrail # (pipeline, module_remap) = \ diff --git 
a/vistrails/core/interpreter/job.py b/vistrails/core/interpreter/job.py index 94526bedf..b7707840d 100644 --- a/vistrails/core/interpreter/job.py +++ b/vistrails/core/interpreter/job.py @@ -1,6 +1,6 @@ ############################################################################### ## -## Copyright (C) 2011-2013, NYU-Poly. +## Copyright (C) 2011-2014, NYU-Poly. ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: contact@vistrails.org @@ -39,7 +39,8 @@ from vistrails.core.configuration import get_vistrails_configuration from vistrails.core.system import current_dot_vistrails from vistrails.core.modules.module_registry import get_module_registry -from vistrails.core.modules.vistrails_module import ModuleSuspended, NotCacheable +from vistrails.core.modules.vistrails_module import NotCacheable, \ + ModuleError, ModuleSuspended from uuid import uuid1 @@ -66,37 +67,40 @@ class JobMixin(NotCacheable): The package developer needs to implement the sub methods readInputs(), getId(), setResults(), startJob(), getMonitor() and finishJob(). 
""" + cache = None + params = None + def compute(self): - params = self.readInputs() - signature = self.getId(params) - jm = JobMonitor.getInstance() - # use cached job if it exist - cache = jm.getCache(signature) - if cache: - self.setResults(cache.parameters) - return - # check if job is running - job = jm.getJob(signature) - if job: - params = job.parameters + if self.cache is not None: + # Result is available and cached + self.setResults(self.cache.parameters) else: - # start job + # Start new job + params = self.readInputs() params = self.startJob(params) - # set visible name - # check custom name - m = self.interpreter._persistent_pipeline.modules[self.id] - if '__desc__' in m.db_annotations_key_index: - name = m.get_annotation_by_key('__desc__').value.strip() - else: - reg = get_module_registry() - name = reg.get_descriptor(self.__class__).name - jm.addJob(signature, params, name) - # call method to check job - jm.checkJob(self, signature, self.getMonitor(params)) - # job is finished, set outputs - params = self.finishJob(params) - self.setResults(params) - cache = jm.setCache(signature, params) + jm = JobMonitor.getInstance() + jm.addJob(self.signature, params, "name-TODO") + + def update_upstream(self): + if not hasattr(self, 'signature'): + raise ModuleError(self, "Module has no signature") + jm = JobMonitor.getInstance() + self.cache = jm.getCache(self.signature) + if self.cache is not None: + return # compute() will use self.cache + job = jm.getJob(self.signature) + if job is not None: + params = job.parameters + # Might raise ModuleSuspended + jm.checkJob(self, self.signature, self.getMonitor(params)) + # Didn't raise: job is finished + params = self.finishJob(params) + jm.setCache(self.signature, params) + self.cache = jm.getCache(self.signature) + # compute() will set results + # We need to submit a new job + # Update upstream, compute() will need it + super(JobMixin, self).update_upstream() def readInputs(self): """ readInputs() -> None diff --git 
a/vistrails/core/modules/basic_modules.py b/vistrails/core/modules/basic_modules.py index 6cad125be..1a2092b3e 100644 --- a/vistrails/core/modules/basic_modules.py +++ b/vistrails/core/modules/basic_modules.py @@ -779,6 +779,7 @@ def compute(self): self.set_output('value', not value) ############################################################################## + # List # If numpy is available, we consider numpy arrays to be lists as well @@ -937,7 +938,8 @@ def cache_this(): 'self': self}) if 'source' in locals_: del locals_['source'] - exec code_str in locals_, locals_ + # Python 2.6 needs code to end with newline + exec code_str + '\n' in locals_, locals_ if use_output: for k in self.output_ports_order: if locals_.get(k) != None: @@ -1164,6 +1166,68 @@ class Variant(Module): ############################################################################## +class Iterator(object): + """ + Used to keep track if list iteration, it will execute a module once for + each input in the list/generator. 
+ """ + _settings = ModuleSettings(abstract=True) + + generators = [] + def __init__(self, values=None, depth=1, size=None, + module=None, generator=None, port=None): + self.list_depth = depth + self.values = values + self.module = module + self.generator = generator + self.port = port + self.size = size + if size is None and values: + self.size = len(values) + self.pos = 0 + if generator and generator not in Iterator.generators: + # add to global list of generators + # they will be uniquely ordered topologically + Iterator.generators.append(self.generator) + + def next(self): + if self.values is not None: + try: + item = self.values[self.pos] + self.pos += 1 + return item + except KeyError: + return None + # return next value - the generator + value = self.module.get_output(self.port) + if isinstance(value, Iterator): + raise ModuleError(self.module, "Iterator generator cannot contain an iterator") + return self.module.get_output(self.port) + + def all(self): + if self.values: + return self.values + items = [] + item = self.next() + while item is not None: + items.append(item) + item = self.next() + return items + + @staticmethod + def stream(): + # execute all generators until inputs are exhausted + # this makes sure branching and multiple sinks are executed correctly + result = True + if not Iterator.generators: + return + while result is not None: + for g in Iterator.generators: + result = g.next() + Iterator.generators = [] + +############################################################################## + class Assert(Module): """ Assert is a simple module that conditionally stops the execution. 
@@ -1247,7 +1311,12 @@ def init_constant(m): reg.add_input_port(m, "value", m) reg.add_output_port(m, "value", m) -_modules = [Module, Converter, Constant, Boolean, Float, Integer, String, List, Path, File, Directory, OutputPath, FileSink, DirectorySink, WriteFile, ReadFile, StandardOutput, Tuple, Untuple, ConcatenateString, Not, Dictionary, Null, Variant, Unpickle, PythonSource, SmartSource, Unzip, UnzipDirectory, Color, Round, TupleToList, Assert, AssertEqual, StringFormat] +_modules = [Module, Converter, Constant, Boolean, Float, Integer, String, List, + Path, File, Directory, OutputPath, + FileSink, DirectorySink, WriteFile, ReadFile, StandardOutput, + Tuple, Untuple, ConcatenateString, Not, Dictionary, Null, Variant, + Unpickle, PythonSource, SmartSource, Unzip, UnzipDirectory, Color, + Round, TupleToList, Assert, AssertEqual, StringFormat] def initialize(*args, **kwargs): # initialize the sub_module modules, too @@ -1322,11 +1391,60 @@ def outputName_remap(old_conn, new_module): return UpgradeWorkflowHandler.remap_module(controller, module_id, pipeline, module_remap) +############################################################################### + +class NewConstant(Constant): + """ + A new Constant module to be used inside the FoldWithModule module. + """ + def setValue(self, v): + self.set_output("value", v) + self.upToDate = True + +def create_constant(value): + """ + Creates a NewConstant module, to be used for the ModuleConnector. + """ + constant = NewConstant() + constant.setValue(value) + return constant + +def get_module(value, signature): + """ + Creates a module for value, in order to do the type checking. 
+ """ + from vistrails.core.modules.basic_modules import Boolean, String, \ + Integer, Float, Constant, List + if isinstance(value, Constant): + return type(value) + elif isinstance(value, bool): + return Boolean + elif isinstance(value, str): + return String + elif isinstance(value, int): + return Integer + elif isinstance(value, float): + return Float + elif isinstance(value, list): + return List + elif isinstance(value, tuple): + v_modules = () + for element in xrange(len(value)): + v_modules += (get_module(value[element], signature[element]),) + return v_modules + else: # pragma: no cover + debug.warning("Could not identify the type of the list element.") + debug.warning("Type checking is not going to be done inside" + "FoldWithModule module.") + return None ############################################################################### import sys -import unittest +try: + import unittest2 as unittest +except: + import unittest class TestConcatenateString(unittest.TestCase): @staticmethod diff --git a/vistrails/core/modules/config.py b/vistrails/core/modules/config.py index 5fd9396ac..99e5c9905 100644 --- a/vistrails/core/modules/config.py +++ b/vistrails/core/modules/config.py @@ -336,6 +336,10 @@ class and should not be used by any other module. The maximum number of values allowed for the port + Port.depth: Integer + + The list depth of the port. 
Default is 0 (no list) + InputPort.label: String A label to be shown with a port @@ -485,6 +489,7 @@ def parse_documentation(): (("shape", None),), (("min_conns", 0),), (("max_conns", -1),), + (("depth", 0),), ]) InputPort = subnamedtuple('InputPort', Port, diff --git a/vistrails/core/modules/module_registry.py b/vistrails/core/modules/module_registry.py index 311f89041..0c0b9c8c0 100644 --- a/vistrails/core/modules/module_registry.py +++ b/vistrails/core/modules/module_registry.py @@ -1406,7 +1406,7 @@ def create_port_spec(self, name, type, signature=None, sigstring=None, optional=False, sort_key=-1, labels=None, defaults=None, values=None, entry_types=None, docstring=None, shape=None, - min_conns=0, max_conns=-1): + min_conns=0, max_conns=-1, depth=0): if signature is None and sigstring is None: raise VistrailsInternalError("create_port_spec: one of signature " "and sigstring must be specified") @@ -1487,7 +1487,8 @@ def create_port_spec(self, name, type, signature=None, sigstring=None, docstring=docstring, shape=shape, min_conns=min_conns, - max_conns=max_conns) + max_conns=max_conns, + depth=depth) # don't know how many port spec items are created until after... 
for psi in spec.port_spec_items: @@ -1538,19 +1539,20 @@ def has_port_spec(self, package, module_name, namespace, def add_port(self, descriptor, port_name, port_type, port_sig=None, port_sigstring=None, optional=False, sort_key=-1, labels=None, defaults=None, values=None, entry_types=None, - docstring=None, shape=None, min_conns=0, max_conns=-1): + docstring=None, shape=None, min_conns=0, max_conns=-1, + depth=0): spec = self.create_port_spec(port_name, port_type, port_sig, port_sigstring, optional, sort_key, labels, defaults, values, entry_types, docstring, shape, - min_conns, max_conns) + min_conns, max_conns, depth) self.add_port_spec(descriptor, spec) def add_input_port(self, module, portName, portSignature, optional=False, - sort_key=-1, labels=None, defaults=None, values=None, - entry_types=None, docstring=None, shape=None, - min_conns=0, max_conns=-1): + sort_key=-1, labels=None, defaults=None, + values=None, entry_types=None, docstring=None, + shape=None, min_conns=0, max_conns=-1, depth=0): """add_input_port(module: class, portName: string, portSignature: string, @@ -1563,7 +1565,8 @@ def add_input_port(self, module, portName, portSignature, optional=False, docstring: string, shape: tuple, min_conns: int, - max_conns: int) -> None + max_conns: int, + depth: int) -> None Registers a new input port with VisTrails. 
Receives the module that will now have a certain port, a string representing the @@ -1572,18 +1575,20 @@ def add_input_port(self, module, portName, portSignature, optional=False, input port is optional.""" descriptor = self.get_descriptor(module) if isinstance(portSignature, basestring): - self.add_port(descriptor, portName, 'input', None, portSignature, + self.add_port(descriptor, portName, 'input', None, portSignature, optional, sort_key, labels, defaults, values, - entry_types, docstring, shape, min_conns, max_conns) + entry_types, docstring, shape, min_conns, max_conns, + depth) else: self.add_port(descriptor, portName, 'input', portSignature, None, optional, sort_key, labels, defaults, values, - entry_types, docstring, shape, min_conns, max_conns) + entry_types, docstring, shape, min_conns, max_conns, + depth) def add_output_port(self, module, portName, portSignature, optional=False, sort_key=-1, docstring=None, shape=None, - min_conns=0, max_conns=-1): + min_conns=0, max_conns=-1, depth=0): """add_output_port(module: class, portName: string, portSignature: string, @@ -1592,7 +1597,8 @@ def add_output_port(self, module, portName, portSignature, optional=False, docstring: string, shape: tuple, min_conns: int, - max_conns: int) -> None + max_conns: int, + depth: int) -> None Registers a new output port with VisTrails. 
Receives the module that will now have a certain port, a string @@ -1603,11 +1609,11 @@ def add_output_port(self, module, portName, portSignature, optional=False, if isinstance(portSignature, basestring): self.add_port(descriptor, portName, 'output', None, portSignature, optional, sort_key, None, None, None, None, - docstring, shape, min_conns, max_conns) + docstring, shape, min_conns, max_conns, depth) else: self.add_port(descriptor, portName, 'output', portSignature, None, optional, sort_key, None, None, None, None, - docstring, shape, min_conns, max_conns) + docstring, shape, min_conns, max_conns, depth) def create_package(self, codepath, load_configuration=True): package_id = self.idScope.getNewId(Package.vtType) diff --git a/vistrails/core/modules/sub_module.py b/vistrails/core/modules/sub_module.py index 5857785a9..4eb9a08d6 100644 --- a/vistrails/core/modules/sub_module.py +++ b/vistrails/core/modules/sub_module.py @@ -233,6 +233,7 @@ def is_cacheable(self): def coalesce_port_specs(neighbors, type): reg = module_registry.get_module_registry() cur_descs = None + cur_depth = 0 Variant_desc = reg.get_descriptor_by_name(basic_pkg, 'Variant') if type == 'input': find_common = reg.find_descriptor_subclass @@ -247,13 +248,19 @@ def coalesce_port_specs(neighbors, type): if cur_descs is None: port_spec = module.get_port_spec(port_name, type) cur_descs = port_spec.descriptors() + cur_depth = port_spec.depth else: next_port_spec = module.get_port_spec(port_name, type) next_descs = next_port_spec.descriptors() + next_depth = next_port_spec.depth if len(cur_descs) != len(next_descs): raise VistrailsInternalError("Cannot have single port " "connect to incompatible " "types") + if cur_depth != next_depth: + raise VistrailsInternalError("Cannot have single port " + "connect to types with " + "different list depth") descs = [] for cur_desc, next_desc in izip(cur_descs, next_descs): if cur_desc is Variant_desc: @@ -266,11 +273,12 @@ def coalesce_port_specs(neighbors, type): 
new_desc = common_desc descs.append(new_desc) cur_descs = descs + if cur_descs: sigstring = '(' + ','.join(d.sigstring for d in cur_descs) + ')' else: sigstring = None - return sigstring + return (sigstring, cur_depth) def get_port_spec_info(pipeline, module): type_map = {'OutputPort': 'output', 'InputPort': 'input'} @@ -295,7 +303,7 @@ def get_port_spec_info(pipeline, module): neighbors = [(pipeline.modules[m_id], get_port_name(c_id)) for (m_id, c_id) in get_edges(module.id)] port_name = neighbors[0][1] - sigstring = coalesce_port_specs(neighbors, type) + sigstring, depth = coalesce_port_specs(neighbors, type) # sigstring = neighbor.get_port_spec(port_name, type).sigstring # FIXME check old registry here? @@ -306,7 +314,7 @@ def get_port_spec_info(pipeline, module): if function.name == 'optional': port_optional = function.params[0].strValue == 'True' # print 'psi:', port_name, old_name, sigstring - return (port_name, sigstring, port_optional, neighbors) + return (port_name, sigstring, port_optional, depth, neighbors) ############################################################################### @@ -427,19 +435,19 @@ def new_abstraction(name, vistrail, vt_fname=None, internal_version=-1L, input_remap = {} output_remap = {} for module in input_modules: - (port_name, sigstring, optional, _) = \ + (port_name, sigstring, optional, depth, _) = \ get_port_spec_info(pipeline, module) - input_ports.append((port_name, sigstring, optional)) + input_ports.append((port_name, sigstring, optional, depth)) input_remap[port_name] = module for module in output_modules: - (port_name, sigstring, optional, _) = \ + (port_name, sigstring, optional, depth, _) = \ get_port_spec_info(pipeline, module) - output_ports.append((port_name, sigstring, optional)) + output_ports.append((port_name, sigstring, optional, depth)) output_remap[port_name] = module # necessary for group - d['_input_ports'] = input_ports - d['_output_ports'] = output_ports + d['_input_ports'] = [IPort(*p[:3], 
depth=p[3]) for p in input_ports] + d['_output_ports'] = [OPort(*p[:3], depth=p[3]) for p in output_ports] d['input_remap'] = input_remap d['output_remap'] = output_remap d['pipeline'] = pipeline diff --git a/vistrails/core/modules/vistrails_module.py b/vistrails/core/modules/vistrails_module.py index 073d7aef1..4b552f066 100644 --- a/vistrails/core/modules/vistrails_module.py +++ b/vistrails/core/modules/vistrails_module.py @@ -32,14 +32,33 @@ ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ## ############################################################################### +from base64 import b16encode, b16decode + import copy -from itertools import izip +import json +import time +from itertools import izip, product import warnings from vistrails.core.data_structures.bijectivedict import Bidict from vistrails.core import debug from vistrails.core.modules.config import ModuleSettings, IPort, OPort -from vistrails.core.utils import VistrailsDeprecation, deprecated +from vistrails.core.utils import VistrailsDeprecation, deprecated, \ + xor, long2bytes +try: + import hashlib + sha1_hash = hashlib.sha1 +except ImportError: + import sha + sha1_hash = sha.new + +# Valid control parameters should be put here +LOOP_KEY = 'loop_type' +WHILE_COND_KEY = 'while_cond' +WHILE_INPUT_KEY = 'while_input' +WHILE_OUTPUT_KEY = 'while_output' +WHILE_MAX_KEY = 'while_max' +WHILE_DELAY_KEY = 'while_delay' class NeedsInputPort(Exception): def __init__(self, obj, port): @@ -275,6 +294,8 @@ def __init__(self): self.upToDate = False self.had_error = False self.was_suspended = False + self.is_while = False + self.list_depth = 0 self.set_output("self", self) # every object can return itself self.logging = _dummy_logging @@ -284,6 +305,8 @@ def __init__(self): # method order can work correctly self.is_method = Bidict() self._latest_method_order = 0 + self.iterated_ports = [] + self.streamed_ports = {} # Pipeline info that a module should know about This is useful # for a spreadsheet cell to know 
where it is from. It will be @@ -384,7 +407,7 @@ def update_upstream(self): raise suspended[0] elif suspended: raise ModuleSuspended( - self, + self, "multiple suspended upstream modules", children=suspended) elif was_suspended is not None: @@ -393,7 +416,41 @@ def update_upstream(self): for connector in connectorList: if connector.obj.get_output(connector.port) is InvalidOutput: self.remove_input_connector(iport, connector) - + + def set_iterated_ports(self): + """ set_iterated_ports() -> None + Calculates which inputs needs to be iterated over + This requires having the pipeline available + """ + self.iterated_ports = [] + if not self.moduleInfo.get('pipeline', None): + return + p_modules = self.moduleInfo['pipeline'].modules + p_module = p_modules[self.moduleInfo['moduleId']] + # get sorted port list + ports = [spec.name for spec in p_module.destinationPorts()] + items = [(port, self.inputPorts[port]) for port in ports + if port in self.inputPorts] + for iport, connectorList in items: + port_spec = p_module.get_port_spec(iport, 'input') + for connector in connectorList: + depth = connector.depth() - port_spec.depth + if depth > 0: + self.iterated_ports.append((iport, depth, connector.get_raw())) + + def set_streamed_ports(self): + """ set_streamed_ports() -> None + Calculates which inputs will be streamed + + """ + self.streamed_ports = {} + from vistrails.core.modules.basic_modules import Iterator + for iport, connectorList in self.inputPorts.items(): + for connector in connectorList: + value = connector.get_raw() + if isinstance(value, Iterator) and value.module: + self.streamed_ports[iport] = value + def update(self): """ update() -> None Check if the module is up-to-date then update the @@ -418,7 +475,26 @@ def update(self): try: if self.is_breakpoint: raise ModuleBreakpoint(self) - self.compute() + self.set_iterated_ports() + self.set_streamed_ports() + if self.streamed_ports: + self.compute_streaming() + elif self.iterated_ports: + self.compute_all() + 
else: + p_modules = self.moduleInfo['pipeline'] and \ + self.moduleInfo['pipeline'].modules + p_module = p_modules and \ + p_modules[self.moduleInfo['moduleId']] + if p_module and not self.is_while and \ + (p_module.has_control_parameter_with_name( + WHILE_COND_KEY) or + p_module.has_control_parameter_with_name( + WHILE_MAX_KEY)): + self.is_while = True + self.compute_while() + else: + self.compute() self.computed = True except ModuleSuspended, e: self.had_error, self.was_suspended = False, True @@ -450,6 +526,524 @@ def update(self): self.logging.end_update(self) self.logging.signalSuccess(self) + def join_port_list_rec(self, value, inputs): + if isinstance(value, basestring): + return [{value:i} for i in inputs[value]] + values = [self.join_port_list_rec(i, inputs) for i in value[1:]] + if value[0] == 'pairwise': + elements = zip(*values) + elif value[0]=='cartesian': + elements = list(product(*values)) + # join the dicts for each item + return [dict((k,v) for d in element for(k,v) in d.items()) for element in elements] + + def compute_all(self): + """This method executes the module once for each module. + Similarly to controlflow's fold. 
+ + """ + from vistrails.core.modules.sub_module import InputPort + if isinstance(self, InputPort): + return self.compute() + if self.list_depth < 1: + raise ModuleError(self, "List compute has wrong depth %s" % + self.list_depth) + from vistrails.core.modules.basic_modules import Iterator + p_modules = self.moduleInfo['pipeline'].modules + p_module = p_modules[self.moduleInfo['moduleId']] + type = 'cartesian' + if p_module.has_control_parameter_with_name(LOOP_KEY): + type = p_module.get_control_parameter_by_name(LOOP_KEY).value + + suspended = [] + + # only iterate max depth and leave the others for the next iteration + ports = [port for port, depth, value in self.iterated_ports + if depth == self.list_depth] + inputs = {} + for port in ports: + value = self.get_input_list(port) + depth, iterator = [(depth, i) for _port, depth, i in + self.iterated_ports if _port == port][0] + # flatten all connections to a single list + value = [item for sublist in value for item in sublist] + if depth > 1: + # wrap values in Iterator of 1 less list depth + d = iterator.list_depth - 1 + value = [i if isinstance(i, Iterator) else Iterator(i, d) + for i in value] + inputs[port] = value + elements = [] + if type in ['pairwise', 'cartesian']: + elements = self.join_port_list_rec([type] + ports, inputs) + #elements = zip(*[inputs[port] for port in ports]) + #elements = list(product(*[inputs[port] for port in ports])) + else: + elements = self.join_port_list_rec(json.loads(type), inputs) + # convert port dict to list in correct port order + elements = [[element[port] for port in ports] for element in elements] + num_inputs = len(elements) + loop = self.logging.begin_loop_execution(self, num_inputs) + ## Update everything for each value inside the list + outputs = {} + module = copy.copy(self) + module.list_depth = self.list_depth - 1 + for i in xrange(num_inputs): + self.logging.update_progress(self, float(i)/num_inputs) + module.had_error = False + + if not self.upToDate: # pragma: no 
partial + ## Type checking + if i == 0: + module.typeChecking(module, ports, elements) + + module.upToDate = False + module.computed = False + + module.setInputValues(module, ports, elements[i], i) + + loop.begin_iteration(module, i) + + try: + module.update() + except ModuleSuspended, e: + e.loop_iteration = i + suspended.append(e) + loop.end_iteration(module) + continue + + loop.end_iteration(module) + + ## Getting the result from the output port + for nameOutput in module.outputPorts: + if nameOutput not in outputs: + outputs[nameOutput] = [] + output = module.get_output(nameOutput) + from vistrails.core.modules.basic_modules import Iterator + if isinstance(output, Iterator): + output = output.all() + outputs[nameOutput].append(output) + + self.logging.update_progress(self, i * 1.0 / num_inputs) + + if suspended: + raise ModuleSuspended( + self, + "function module suspended in %d/%d iterations" % ( + len(suspended), num_inputs), + children=suspended) + # set final outputs + for nameOutput in outputs: + self.set_output(nameOutput, outputs[nameOutput]) + loop.end_loop_execution() + + def compute_streaming(self): + """This method creates a generator object and sets the outputs as + Iterator generators. 
+ + """ + if self.list_depth == 0: + # this will be executed once after streaming is finished + # if all has depth=0 this is a post-streaming module + if max([i.list_depth for i in self.streamed_ports.itervalues()]): + # input accumulation needed + from vistrails.core.modules.basic_modules import PythonSource + if isinstance(self, Streaming) or\ + (isinstance(self, PythonSource) and + '%23%20pragma%3A%20streaming' in self.get_input('source')): + # Magic tag: "# pragma: streaming" + self.compute() + else: + self.compute_accumulate() + else: + # forward the result after streaming + self.compute_after_streaming() + return + from vistrails.core.modules.basic_modules import Iterator + p_modules = self.moduleInfo['pipeline'].modules + p_module = p_modules[self.moduleInfo['moduleId']] + type = 'pairwise' + if p_module.has_control_parameter_with_name(LOOP_KEY): + type = p_module.get_control_parameter_by_name(LOOP_KEY).value + if type == 'cartesian': + raise ModuleError(self, + 'Cannot use cartesian product while streaming!') + suspended = [] + + # only iterate the max depth and leave others for the next iteration + ports = [port for port, depth, value in self.iterated_ports + if depth == self.list_depth] + num_inputs = self.iterated_ports[0][2].size + # the generator will read next from each iterated input port and + # compute the module again + module = copy.copy(self) + module.list_depth = self.list_depth - 1 + if num_inputs: + milestones = [i*num_inputs/10 for i in xrange(1,11)] + def generator(self): + self.logging.begin_compute(module) + i = 0 + while 1: + iter_dict = dict([(port, (depth, value)) + for port, depth, value in + self.iterated_ports]) + + elements = [iter_dict[port][1].next() for port in ports] + if None in elements: + for name_output in module.outputPorts: + module.set_output(name_output, None) + if suspended: + raise ModuleSuspended( + self, + ("function module suspended after streaming " + "%d/%d iterations") % ( + len(suspended), num_inputs), + 
children=suspended) + self.logging.update_progress(module, 1.0) + self.logging.end_update(module) + yield None + if num_inputs: + if i in milestones: + self.logging.update_progress(module,float(i)/num_inputs) + else: + self.logging.update_progress(module, 0.5) + module.had_error = False + ## Type checking + if i == 0: + module.typeChecking(module, ports, [elements]) + + module.upToDate = False + module.computed = False + + module.setInputValues(module, ports, elements, i) + + try: + module.compute() + except ModuleSuspended, e: + e.loop_iteration = i + suspended.append(e) + except Exception, e: + raise ModuleError(module, str(e)) + i += 1 + yield True + + _generator = generator(self) + # set streaming outputs + for name_output in self.outputPorts: + iterator = Iterator(depth=self.list_depth, + size=num_inputs, + module=module, + generator=_generator, + port=name_output) + self.set_output(name_output, iterator) + + def compute_accumulate(self): + """This method creates a generator object that converts all + streaming inputs to list inputs for modules that does not explicitly + support streaming. 
+ + """ + from vistrails.core.modules.basic_modules import Iterator + suspended = [] + # max depth should be one + ports = self.streamed_ports.keys() + num_inputs = self.streamed_ports[ports[0]].size + # the generator will read next from each iterated input port and + # compute the module again + module = copy.copy(self) + module.list_depth = self.list_depth + module.had_error = False + module.upToDate = False + module.computed = False + + inputs = dict([(port, []) for port in ports]) + def generator(self): + self.logging.begin_update(module) + i = 0 + while 1: + elements = [self.streamed_ports[port].next() for port in ports] + if None in elements: + self.logging.begin_compute(module) + # assembled all inputs so do the actual computation + elements = [inputs[port] for port in ports] + ## Type checking + module.typeChecking(module, ports, zip(*elements)) + module.setInputValues(module, ports, elements, i) + try: + module.compute() + except Exception, e: + raise ModuleError(module, str(e)) + if suspended: + raise ModuleSuspended( + self, + ("function module suspended after streaming " + "%d/%d iterations") % ( + len(suspended), num_inputs), + children=suspended) + self.logging.end_update(module) + yield None + + for port, value in zip(ports, elements): + inputs[port].append(value) + for name_output in module.outputPorts: + module.set_output(name_output, None) + i += 1 + yield True + + _generator = generator(self) + # set streaming outputs + for name_output in self.outputPorts: + iterator = Iterator(depth=self.list_depth, + size=num_inputs, + module=module, + generator=_generator, + port=name_output) + self.set_output(name_output, iterator) + + def compute_after_streaming(self): + """This method creates a generator object that computes when the + streaming is finished. 
+ + """ + from vistrails.core.modules.basic_modules import Iterator + suspended = [] + + # max depth should be one + # max depth should be one + ports = [port for port in self.streamed_ports] + num_inputs = self.streamed_ports[ports[0]].size + # the generator will read next from each iterated input port and + # compute the module again + module = copy.copy(self) + module.list_depth = self.list_depth + module.had_error = False + module.upToDate = False + module.computed = False + + def generator(self): + self.logging.begin_update(module) + i = 0 + for name_output in module.outputPorts: + module.set_output(name_output, None) + while 1: + elements = [self.streamed_ports[port].next() for port in ports] + if None not in elements: + self.logging.begin_compute(module) + ## Type checking + module.typeChecking(module, ports, [elements]) + module.setInputValues(module, ports, elements, i) + try: + module.compute() + except Exception, e: + raise ModuleError(module, str(e)) + if suspended: + raise ModuleSuspended( + self, + ("function module suspended after streaming " + "%d/%d iterations") % ( + len(suspended), num_inputs), + children=suspended) + self.logging.update_progress(self, 1.0) + self.logging.end_update(module) + yield None + i += 1 + yield True + + _generator = generator(self) + # set streaming outputs + for name_output in self.outputPorts: + iterator = Iterator(depth=self.list_depth, + size=num_inputs, + module=module, + generator=_generator, + port=name_output) + self.set_output(name_output, iterator) + + def compute_while(self): + """This method executes the module once for each module. + Similarly to fold. 
+ + """ + p_modules = self.moduleInfo['pipeline'].modules + p_module = p_modules[self.moduleInfo['moduleId']] + name_condition = None + if p_module.has_control_parameter_with_name(WHILE_COND_KEY): + name_condition = p_module.get_control_parameter_by_name( + WHILE_COND_KEY).value + max_iterations = 20 + if p_module.has_control_parameter_with_name(WHILE_MAX_KEY): + max_iterations = int(p_module.get_control_parameter_by_name( + WHILE_MAX_KEY).value) + delay = 0.0 + if p_module.has_control_parameter_with_name(WHILE_DELAY_KEY): + delay = float(p_module.get_control_parameter_by_name( + WHILE_DELAY_KEY).value) + # todo only one state port supported right now + name_state_input = None + if p_module.has_control_parameter_with_name(WHILE_INPUT_KEY): + name_state_input = [p_module.get_control_parameter_by_name( + WHILE_INPUT_KEY).value] + name_state_output = None + if p_module.has_control_parameter_with_name(WHILE_OUTPUT_KEY): + name_state_output = [p_module.get_control_parameter_by_name( + WHILE_OUTPUT_KEY).value] + + from vistrails.core.modules.basic_modules import create_constant + + if name_state_input or name_state_output: + if not name_state_input or not name_state_output: + raise ModuleError(self, + "Passing state between iterations requires " + "BOTH StateInputPorts and StateOutputPorts " + "to be set") + if len(name_state_input) != len(name_state_output): + raise ModuleError(self, + "StateInputPorts and StateOutputPorts need " + "to have the same number of ports " + "(got %d and %d)" %(len(name_state_input), + len(name_state_output))) + + module = copy.copy(self) + module.had_error = False + + state = None + + loop = self.logging.begin_loop_execution(self, max_iterations) + for i in xrange(max_iterations): + if not self.upToDate: + module.upToDate = False + module.computed = False + + # Set state on input ports + if i > 0 and name_state_input: + for value, port in izip(state, name_state_input): + if port in module.inputPorts: + del module.inputPorts[port] + 
new_connector = ModuleConnector( + create_constant(value), + 'value') + module.set_input_port(port, new_connector) + + loop.begin_iteration(module, i) + + try: + module.update() # might raise ModuleError, ModuleSuspended, + # ModuleHadError, ModuleWasSuspended + except ModuleSuspended, e: + e.loop_iteration = i + raise + + loop.end_iteration(module) + + if name_condition is not None: + if name_condition not in module.outputPorts: + raise ModuleError( + module, + "Invalid output port: %s" % name_condition) + if not module.get_output(name_condition): + break + + if delay and i+1 != max_iterations: + time.sleep(delay) + + # Get state on output ports + if name_state_output: + state = [module.get_output(port) for port in name_state_output] + + self.logging.update_progress(self, i * 1.0 / max_iterations) + + loop.end_loop_execution() + + for name_output in self.outputPorts: + self.set_output(name_output, module.get_output(name_output)) + + def setInputValues(self, module, inputPorts, elementList, iteration): + """ + Function used to set a value inside 'module', given the input port(s). + """ + from vistrails.core.modules.basic_modules import create_constant + for element, inputPort in izip(elementList, inputPorts): + ## Cleaning the previous connector... + if inputPort in module.inputPorts: + del module.inputPorts[inputPort] + new_connector = ModuleConnector(create_constant(element), 'value') + module.set_input_port(inputPort, new_connector) + # Affix a fake signature on the module + # Ultimately, we might want to give it the signature it would have + # with its current functions if it had a connection to the upstream + # of our InputList port through a Getter module? + # This structure with the Getter is unlikely to actually happen + # anywhere though... 
+ # The fake signature is + # XOR(signature(loop module), iteration, hash(inputPort)) + inputPort_hash = sha1_hash() + inputPort_hash.update(inputPort) + module.signature = b16encode(xor( + b16decode(self.signature.upper()), + long2bytes(iteration, 20), + inputPort_hash.digest())) + + def typeChecking(self, module, inputPorts, inputList): + """ + Function used to check if the types of the input list element and of the + inputPort of 'module' match. + """ + from vistrails.core.modules.basic_modules import Iterator + from vistrails.core.modules.basic_modules import get_module + for elementList in inputList: + if len(elementList) != len(inputPorts): + raise ModuleError(self, + 'The number of input values and input ports ' + 'are not the same.') + for element, inputPort in izip(elementList, inputPorts): + if isinstance(element, Iterator): + if element.values: + return + else: + raise ModuleError(self, "Iterator is not allowed here") + p_modules = module.moduleInfo['pipeline'].modules + p_module = p_modules[module.moduleInfo['moduleId']] + port_spec = p_module.get_port_spec(inputPort, 'input') + v_module = get_module(element, port_spec.signature) + if v_module is not None: + if not self.compare(port_spec, v_module, inputPort): + raise ModuleError(self, + 'The type of a list element does ' + 'not match with the type of the ' + 'port %s.' % inputPort) + + del v_module + else: + break + + def createSignature(self, v_module): + """ + ` Function used to create a signature, given v_module, for a port spec. + """ + if isinstance(v_module, tuple): + v_module_class = [] + for module_ in v_module: + v_module_class.append(self.createSignature(module_)) + return v_module_class + else: + return v_module + + def compare(self, port_spec, v_module, port): + """ + Function used to compare two port specs. 
+ """ + port_spec1 = port_spec + + from vistrails.core.modules.module_registry import get_module_registry + from vistrails.core.vistrail.port_spec import PortSpec + reg = get_module_registry() + + v_module = self.createSignature(v_module) + port_spec2 = PortSpec(**{'signature': v_module}) + matched = reg.are_specs_matched(port_spec2, port_spec1) + + return matched + def compute(self): """This method should be overridden in order to perform the module's computation. @@ -479,7 +1073,18 @@ def get_input(self, port_name, allow_default=True): for conn in self.inputPorts[port_name]: if isinstance(conn.obj, InputPort): return conn() - return self.inputPorts[port_name][0]() + value = self.inputPorts[port_name][0]() + depth = self.inputPorts[port_name][0].depth() + # type check list of lists + if self.moduleInfo.get('pipeline', False): + p_modules = self.moduleInfo['pipeline'].modules + p_module = p_modules[self.moduleInfo['moduleId']] + spec = p_module.get_port_spec(port_name, 'input') + # wrap depths that are too shallow + while depth - self.list_depth - spec.depth < 0: + value = [value] + depth += 1 + return value def get_input_list(self, port_name): """Returns the value(s) coming in on the input port named @@ -501,7 +1106,36 @@ def get_input_list(self, port_name): if isinstance(connector.obj, InputPort)] if len(fromInputPortModule)>0: return fromInputPortModule - return [connector() for connector in self.inputPorts[port_name]] + ports = [] + for connector in self.inputPorts[port_name]: + value = connector() + depth = connector.depth() + root = value + # type check list of lists + if self.moduleInfo['pipeline']: + from vistrails.core.modules.basic_modules import Iterator + p_modules = self.moduleInfo['pipeline'].modules + p_module = p_modules[self.moduleInfo['moduleId']] + spec = p_module.get_port_spec(port_name, 'input') + # wrap depths that are too shallow + while depth - self.list_depth - spec.depth < 0: + value = [value] + depth += 1 + for i in xrange(1, depth): + 
try: + root = [(item.all() if isinstance(item, Iterator) + else item) for item in root] + root = [item for sublist in root for item in sublist] + except TypeError: + raise ModuleError(self, "List on port %s has wrong" + " depth %s, expected %s." % + (port_name, i-1, depth)) + + if depth and root: + self.typeChecking(self, [port_name], + [[r] for r in root] if depth else [[root]]) + ports.append(value) + return ports def set_output(self, port_name, value): """This method is used to set a value on an output port. @@ -511,6 +1145,20 @@ def set_output(self, port_name, value): :param value: the value to be assigned to the port """ + if value is not self: + from vistrails.core.modules.basic_modules import Iterator + if not isinstance(value, Iterator): + # wrap lists in the special Iterator class + p_modules = self.moduleInfo['pipeline'] and \ + self.moduleInfo['pipeline'].modules + p_module = p_modules and p_modules[self.moduleInfo['moduleId']] + try: + port_spec = p_module and p_module.get_port_spec(port_name, + 'output') + except Exception: + port_spec = None + if port_spec and (port_spec.depth + self.list_depth): + value = Iterator(value, port_spec.depth + self.list_depth) self.outputPorts[port_name] = value def check_input(self, port_name): @@ -668,6 +1316,127 @@ def remove_input_connector(self, port_name, connector): if conList==[]: del self.inputPorts[port_name] + def create_instance_of_type(self, ident, name, ns=''): + """ Create a vistrails module from the module registry. This creates + an instance of the module for use in creating the object output by a + Module. 
+ """ + from vistrails.core.modules.module_registry import get_module_registry + try: + reg = get_module_registry() + m = reg.get_module_by_name(ident, name, ns) + return m() + except: + msg = "Cannot get module named " + str(name) + \ + " with identifier " + str(ident) + " and namespace " + ns + raise ModuleError(self, msg) + + def set_streaming(self, UserGenerator): + """creates a generator object that computes when the next input is received. + """ + # use the below tag if calling from a PythonSource + # pragma: streaming - This tag is magic, do not change. + from vistrails.core.modules.basic_modules import Iterator + + ports = self.streamed_ports.keys() + num_inputs = self.streamed_ports[ports[0]].size + module = copy.copy(self) + module.list_depth = self.list_depth-1 + module.had_error = False + module.upToDate = False + module.computed = False + + if num_inputs: + milestones = [i*num_inputs/10 for i in xrange(1,11)] + + def Generator(self): + self.logging.begin_compute(module) + i = 0 + # + #intsum = 0 + userGenerator = UserGenerator(module) + while 1: + elements = [self.streamed_ports[port].next() for port in ports] + if None in elements: + self.logging.update_progress(self, 1.0) + self.logging.end_update(module) + for name_output in module.outputPorts: + module.set_output(name_output, None) + yield None + ## Type checking + module.typeChecking(module, ports, [elements]) + module.setInputValues(module, ports, elements, i) + + userGenerator.next() + # + #intsum += dict(zip(ports, elements))['integerStream'] + #print "Sum so far:", intsum + + # + #module.set_output(name_output, intsum) + if num_inputs: + if i in milestones: + self.logging.update_progress(self,float(i)/num_inputs) + else: + self.logging.update_progress(self, 0.5) + i += 1 + yield True + + generator = Generator(self) + # sets streaming outputs for downstream modules + for name_output in self.outputPorts: + iterator = Iterator(size=num_inputs, + module=module, + generator=generator, + 
port=name_output) + + self.set_output(name_output, iterator) + + def set_streaming_output(self, port, generator, size=0): + """This method is used to set a streaming output port. + + :param port: the name of the output port to be set + :type port: String + :param generator: An iterator object supporting .next() + :param size: The number of values if known (default=0) + :type size: Integer + """ + from vistrails.core.modules.basic_modules import Iterator + module = copy.copy(self) + module.list_depth = -1 + + if size: + milestones = [i*size/10 for i in xrange(1,11)] + def _generator(): + i = 0 + while 1: + try: + value = generator.next() + except StopIteration: + module.set_output(port, None) + self.logging.update_progress(self, 1.0) + yield None + except Exception, e: + me = ModuleError(self, "Error generating value: %s"% str(e), + errorTrace=str(e)) + raise me + if value is None: + module.set_output(port, None) + self.logging.update_progress(self, 1.0) + yield None + module.set_output(port, value) + if size: + if i in milestones: + self.logging.update_progress(self,float(i)/size) + else: + self.logging.update_progress(self, 0.5) + i += 1 + yield True + self.set_output(port, Iterator(size=size, + module=module, + generator=_generator(), + port=port)) + @classmethod def provide_input_port_documentation(cls, port_name): return None @@ -740,6 +1509,14 @@ def is_cacheable(self): ################################################################################ +class Streaming(object): + """ A mixin indicating support for streamable inputs + + """ + pass + +################################################################################ + class Converter(Module): """Base class for automatic conversion modules. @@ -799,6 +1576,18 @@ def clear(self): self.obj = None self.port = None + def depth(self, result=None): + """depth(result) -> int. 
Returns the list depth of the port value.""" + if result is None: + result = self.obj.get_output(self.port) + from vistrails.core.modules.basic_modules import Iterator + return result.list_depth if isinstance(result, Iterator) else 0 + + def get_raw(self): + """get_raw() -> Module. Returns the value or an Iterator.""" + return self.obj.get_output(self.port) + + def __call__(self): result = self.obj.get_output(self.port) if isinstance(result, Module): @@ -807,6 +1596,10 @@ def __call__(self): "module=%s, port=%s, object=%r" % (type(self.obj).__name__, self.port, result), UserWarning) + depth = self.depth() + from vistrails.core.modules.basic_modules import Iterator + if isinstance(result, Iterator): + result = result.all() if self.spec is not None and self.typecheck is not None: descs = self.spec.descriptors() typecheck = self.typecheck @@ -814,7 +1607,18 @@ def __call__(self): if not typecheck[0]: return result mod = descs[0].module - if hasattr(mod, 'validate') and not mod.validate(result): + value = result + # flatten list + for i in xrange(depth): + try: + value = [item for sublist in value for item in sublist] + except TypeError: + raise ModuleError(self.obj, "List on port %s has wrong" + " depth %s, expected %s." 
+ "%s"% (self.port, i, depth)) + if depth: + value = value[0] if value else None + if hasattr(mod, 'validate') and value and not mod.validate(value): raise ModuleError(self.obj, "Type passed on Variant port " "%s does not match destination type " "%s" % (self.port, descs[0].name)) @@ -885,3 +1689,35 @@ def new_module(base_module, name, dict={}, docstring=None): # >>> c = Z() # >>> c.f() # 4 + +import unittest + +class TestImplicitLooping(unittest.TestCase): + def test_features(self): + from vistrails.core.system import vistrails_root_directory + from vistrails.core.db.locator import FileLocator + from vistrails.core.db.io import load_vistrail + from vistrails.core.console_mode import run + from vistrails.tests.utils import capture_stdout + import os + resources = vistrails_root_directory() + '/tests/resources/' + files = ['test-implicit-while.vt', + 'test-streaming.vt', + 'test-list-custom.vt'] + for vtfile in files: + try: + errs = [] + filename = os.path.join(resources, vtfile) + locator = FileLocator(os.path.abspath(filename)) + (v, _, _, _) = load_vistrail(locator) + w_list = [] + for version, _ in v.get_tagMap().iteritems(): + w_list.append((locator,version)) + if len(w_list) > 0: + with capture_stdout() as c: + errs = run(w_list, update_vistrail=False) + for err in errs: + self.fail(str(err)) + except Exception, e: + self.fail(debug.format_exception(e)) + diff --git a/vistrails/core/upgradeworkflow.py b/vistrails/core/upgradeworkflow.py index 61cfda4c9..b90d94ea9 100644 --- a/vistrails/core/upgradeworkflow.py +++ b/vistrails/core/upgradeworkflow.py @@ -44,6 +44,7 @@ from vistrails.core.packagemanager import get_package_manager from vistrails.core.system import get_vistrails_basic_pkg_id from vistrails.core.vistrail.annotation import Annotation +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.connection import Connection from vistrails.core.vistrail.port import Port from vistrails.core.vistrail.port_spec 
import PortSpec @@ -70,7 +71,7 @@ def __init__(self, start_version, end_version, output_version, new_module=None, dst_port_remap=None, src_port_remap=None, function_remap=None, annotation_remap=None, - module_name=None): + control_param_remap=None, module_name=None): self.module_name = module_name self.start_version = start_version self.end_version = end_version @@ -93,6 +94,10 @@ def __init__(self, start_version, end_version, self._annotation_remap = {} else: self._annotation_remap = annotation_remap + if control_param_remap is None: + self._control_param_remap = {} + else: + self._control_param_remap = control_param_remap @classmethod def from_tuple(cls, module_name, t): @@ -136,6 +141,10 @@ def _get_annotation_remap(self): return self._annotation_remap annotation_remap = property(_get_annotation_remap) + def _get_control_param_remap(self): + return self._control_param_remap + control_param_remap = property(_get_control_param_remap) + def add_remap(self, remap_type, remap_name, remap_change): if not hasattr(self, '_%s' % remap_type): raise ValueError('remap_type "%s" not allowed' % remap_type) @@ -306,8 +315,14 @@ def find_descriptor(controller, pipeline, module_id, desired_version=''): return d @staticmethod - def check_upgrade(pipeline, module_id, d, function_remap={}, - src_port_remap={}, dst_port_remap={}): + def check_upgrade(pipeline, module_id, d, function_remap=None, + src_port_remap=None, dst_port_remap=None): + if function_remap is None: + function_remap = {} + if src_port_remap is None: + src_port_remap = {} + if dst_port_remap is None: + dst_port_remap = {} invalid_module = pipeline.modules[module_id] def check_connection_port(port): port_type = PortSpec.port_type_map.inverse[port.type] @@ -336,8 +351,9 @@ def check_connection_port(port): @staticmethod def attempt_automatic_upgrade(controller, pipeline, module_id, - function_remap={}, src_port_remap={}, - dst_port_remap={}, annotation_remap={}): + function_remap=None, src_port_remap=None, + 
dst_port_remap=None, annotation_remap=None, + control_param_remap=None): """attempt_automatic_upgrade(module_id, pipeline): [Action] Attempts to automatically upgrade module by simply adding a @@ -380,7 +396,8 @@ def attempt_automatic_upgrade(controller, pipeline, module_id, function_remap, src_port_remap, dst_port_remap, - annotation_remap) + annotation_remap, + control_param_remap) @staticmethod def create_new_connection(controller, src_module, src_port, @@ -424,9 +441,20 @@ def create_new_connection(controller, src_module, src_port, @staticmethod def replace_generic(controller, pipeline, old_module, new_module, - function_remap={}, src_port_remap={}, - dst_port_remap={}, annotation_remap={}, - use_registry=True): + function_remap=None, src_port_remap=None, + dst_port_remap=None, annotation_remap=None, + control_param_remap=None, use_registry=True): + if function_remap is None: + function_remap = {} + if src_port_remap is None: + src_port_remap = {} + if dst_port_remap is None: + dst_port_remap = {} + if annotation_remap is None: + annotation_remap = {} + if control_param_remap is None: + control_param_remap = {} + basic_pkg = get_vistrails_basic_pkg_id() ops = [] @@ -452,6 +480,27 @@ def replace_generic(controller, pipeline, old_module, new_module, value=annotation.value) new_module.add_annotation(new_annotation) + for control_param in old_module.control_parameters: + if control_param.name not in control_param_remap: + control_param_name = control_param.name + else: + remap = control_param_remap[control_param.name] + if remap is None: + # don't add the control param back in + continue + elif not isinstance(remap, basestring): + ops.extend(remap(control_param)) + continue + else: + control_param_name = remap + + new_control_param = \ + ModuleControlParam(id=controller.id_scope.getNewId( + ModuleControlParam.vtType), + name=control_param_name, + value=control_param.value) + new_module.add_control_parameter(new_control_param) + if not old_module.is_group() and not 
old_module.is_abstraction(): for port_spec in old_module.port_spec_list: if port_spec.type == 'input': @@ -603,8 +652,9 @@ def replace_group(controller, pipeline, module_id, new_subpipeline): @staticmethod def replace_module(controller, pipeline, module_id, new_descriptor, - function_remap={}, src_port_remap={}, dst_port_remap={}, - annotation_remap={}, use_registry=True): + function_remap=None, src_port_remap=None, + dst_port_remap=None, annotation_remap=None, + control_param_remap=None, use_registry=True): old_module = pipeline.modules[module_id] internal_version = -1 # try to determine whether new module is an abstraction @@ -625,6 +675,7 @@ def replace_module(controller, pipeline, module_id, new_descriptor, src_port_remap, dst_port_remap, annotation_remap, + control_param_remap, use_registry) @staticmethod @@ -739,6 +790,7 @@ def outputName_remap(old_conn, new_module): module_remap.src_port_remap, module_remap.dst_port_remap, module_remap.annotation_remap, + module_remap.control_param_remap, use_registry) for a in actions: diff --git a/vistrails/core/utils/__init__.py b/vistrails/core/utils/__init__.py index b1171e16e..2185d3a9f 100644 --- a/vistrails/core/utils/__init__.py +++ b/vistrails/core/utils/__init__.py @@ -538,7 +538,44 @@ def __call__(self): import new instance_method = new.instancemethod return instance_method(self._func, self._obj(), self._clas) - + +############################################################################### + +def xor(first, *others): + """XORs bytestrings. + + Example: xor('abcd', '\x20\x01\x57\x56') = 'Ac42' + """ + l = len(first) + first = [ord(c) for c in first] + for oth in others: + if len(oth) != l: + raise ValueError("All bytestrings should have the same length: " + "%d != %d" % (l, len(oth))) + first = [c ^ ord(o) for (c, o) in itertools.izip(first, oth)] + return ''.join(chr(c) for c in first) + +def long2bytes(nb, length=None): + """Turns a single integer into a little-endian bytestring. 
+ + Uses as many bytes as necessary or optionally pads to length bytes. + Might return a result longer than length. + + Example: long2bytes(54321, 4) = b'\x31\xD4\x00\x00' + """ + if nb < 0: + raise ValueError + elif nb == 0: + result = b'\x00' + else: + result = b'' + while nb > 0: + result += chr(nb & 0xFF) + nb = nb >> 8 + if length is not None and len(result) < length: + result += '\x00' * (length - len(result)) + return result + ################################################################################ class Chdir(object): diff --git a/vistrails/core/vistrail/abstraction.py b/vistrails/core/vistrail/abstraction.py index 8e42321a3..b1bcf6733 100644 --- a/vistrails/core/vistrail/abstraction.py +++ b/vistrails/core/vistrail/abstraction.py @@ -40,6 +40,7 @@ from vistrails.core.vistrail.annotation import Annotation from vistrails.core.vistrail.location import Location from vistrails.core.vistrail.module import Module +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.module_function import ModuleFunction from vistrails.db.domain import DBAbstraction @@ -98,6 +99,8 @@ def convert(_abstraction): ModuleFunction.convert(_function) for _annotation in _abstraction.db_get_annotations(): Annotation.convert(_annotation) + for _control_parameter in _abstraction.db_get_controlParameters(): + ModuleControlParam.convert(_control_parameter) _abstraction.set_defaults() ########################################################################## @@ -107,6 +110,7 @@ def convert(_abstraction): id = DBAbstraction.db_id cache = DBAbstraction.db_cache annotations = DBAbstraction.db_annotations + control_parameters = DBAbstraction.db_controlParameters location = DBAbstraction.db_location center = DBAbstraction.db_location name = DBAbstraction.db_name diff --git a/vistrails/core/vistrail/controller.py b/vistrails/core/vistrail/controller.py index e815610b9..e07bad796 100644 --- a/vistrails/core/vistrail/controller.py +++ 
b/vistrails/core/vistrail/controller.py @@ -77,6 +77,7 @@ from vistrails.core.vistrail.group import Group from vistrails.core.vistrail.location import Location from vistrails.core.vistrail.module import Module, ModuleFunction, ModuleParam +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.module_function import ModuleFunction from vistrails.core.vistrail.module_param import ModuleParam from vistrails.core.vistrail.pipeline import Pipeline @@ -809,13 +810,15 @@ def create_port_spec(self, *args, **kwargs): @staticmethod def create_port_spec_static(id_scope, module, port_type, port_name, - port_sigstring, port_sort_key=-1): + port_sigstring, port_sort_key=-1, + port_depth=0): p_id = id_scope.getNewId(PortSpec.vtType) port_spec = PortSpec(id=p_id, type=port_type, name=port_name, sigstring=port_sigstring, sort_key=port_sort_key, + depth=port_depth ) # don't know how many port spec items are created until after... for psi in port_spec.port_spec_items: @@ -1234,6 +1237,48 @@ def add_annotation(self, pair, module_id): module.id)]) return action + @vt_action + def delete_control_parameter(self, name, module_id): + """ delete_control_parameter(name: str, module_id: long) -> version_id + Deletes an control_parameter from a module + + """ + module = self.current_pipeline.get_module_by_id(module_id) + control_parameter = module.get_control_parameter_by_name(name) + action = vistrails.core.db.action.create_action([('delete', control_parameter, + module.vtType, module.id)]) + return action + + @vt_action + def add_control_parameter(self, pair, module_id): + """ add_control_parameter(pair: (str, str), moduleId: int) + Add/Update a name/value pair control_parameter into the module of + moduleId + + """ + assert isinstance(pair[0], basestring) + assert isinstance(pair[1], basestring) + if pair[0].strip()=='': + return + + module = self.current_pipeline.get_module_by_id(module_id) + a_id = 
self.vistrail.idScope.getNewId(ModuleControlParam.vtType) + control_parameter = ModuleControlParam(id=a_id, + name=pair[0], + value=pair[1], + ) + if module.has_control_parameter_with_name(pair[0]): + old_control_parameter = module.get_control_parameter_by_name(pair[0]) + action = \ + vistrails.core.db.action.create_action([('change', old_control_parameter, + control_parameter, + module.vtType, module.id)]) + else: + action = vistrails.core.db.action.create_action([('add', control_parameter, + module.vtType, + module.id)]) + return action + def update_functions_ops_from_ids(self, module_id, functions): module = self.current_pipeline.modules[module_id] return self.update_functions_ops(module, functions) diff --git a/vistrails/core/vistrail/group.py b/vistrails/core/vistrail/group.py index 7a7487922..79214be35 100644 --- a/vistrails/core/vistrail/group.py +++ b/vistrails/core/vistrail/group.py @@ -38,6 +38,7 @@ from vistrails.core.vistrail.annotation import Annotation from vistrails.core.vistrail.location import Location from vistrails.core.vistrail.module import Module +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.module_function import ModuleFunction from vistrails.core.vistrail.port_spec import PortSpec, PortEndPoint from vistrails.db.domain import DBGroup @@ -108,6 +109,8 @@ def convert(_group): ModuleFunction.convert(_function) for _annotation in _group.db_get_annotations(): Annotation.convert(_annotation) + for _control_parameter in _group.db_get_controlParameters(): + ModuleControlParam.convert(_control_parameter) _group.set_defaults() ########################################################################## @@ -117,6 +120,7 @@ def convert(_group): id = DBGroup.db_id cache = DBGroup.db_cache annotations = DBGroup.db_annotations + control_parameters = DBGroup.db_controlParameters location = DBGroup.db_location center = DBGroup.db_location # version = DBGroup.db_version @@ -235,20 +239,20 @@ def 
make_port_specs(self): registry = get_module_registry() for module in self.pipeline.module_list: if module.name == 'OutputPort' and module.package == basic_pkg: - (port_name, sigstring, optional, _) = \ + (port_name, sigstring, optional, depth, _) = \ self.get_port_spec_info(module) port_spec = registry.create_port_spec(port_name, 'output', None, sigstring, - optional) + optional, depth=depth) self._port_specs[(port_name, 'output')] = port_spec self._output_port_specs.append(port_spec) self._output_remap[port_name] = module elif module.name == 'InputPort' and module.package == basic_pkg: - (port_name, sigstring, optional, _) = \ + (port_name, sigstring, optional, depth, _) = \ self.get_port_spec_info(module) port_spec = registry.create_port_spec(port_name, 'input', None, sigstring, - optional) + optional, depth=depth) self._port_specs[(port_name, 'input')] = port_spec self._input_port_specs.append(port_spec) self._input_remap[port_name] = module diff --git a/vistrails/core/vistrail/module.py b/vistrails/core/vistrail/module.py index 3360e146c..ba1dac72a 100644 --- a/vistrails/core/vistrail/module.py +++ b/vistrails/core/vistrail/module.py @@ -40,19 +40,16 @@ import weakref from vistrails.db.domain import DBModule -from vistrails.core.data_structures.point import Point from vistrails.core.vistrail.annotation import Annotation from vistrails.core.vistrail.location import Location +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.module_function import ModuleFunction from vistrails.core.vistrail.module_param import ModuleParam -from vistrails.core.vistrail.port import Port, PortEndPoint from vistrails.core.vistrail.port_spec import PortSpec -from vistrails.core.utils import NoSummon, VistrailsInternalError, report_stack -from vistrails.core.modules.module_descriptor import OverloadedPort -from vistrails.core.modules.module_registry import get_module_registry, ModuleRegistry +from vistrails.core.utils import 
NoSummon +from vistrails.core.modules.module_registry import get_module_registry import unittest -import vistrails.core ################################################################################ @@ -93,6 +90,8 @@ def set_defaults(self, other=None): self.is_watched = False self._descriptor_info = None self._module_descriptor = None + self.list_depth = 0 + self.iterated_ports = [] else: self.portVisible = copy.copy(other.portVisible) self.visible_input_ports = copy.copy(other.visible_input_ports) @@ -104,6 +103,8 @@ def set_defaults(self, other=None): self.is_breakpoint = other.is_breakpoint self.is_watched = other.is_watched self._descriptor_info = None + self.list_depth = other.list_depth + self.iterated_ports = other.iterated_ports self._module_descriptor = other._module_descriptor if not self.namespace: self.namespace = None @@ -142,6 +143,8 @@ def convert(_module): ModuleFunction.convert(_function) for _annotation in _module.db_get_annotations(): Annotation.convert(_annotation) + for _control_parameter in _module.db_get_controlParameters(): + ModuleControlParam.convert(_control_parameter) _module.set_defaults() ########################################################################## @@ -154,6 +157,7 @@ def convert(_module): id = DBModule.db_id cache = DBModule.db_cache annotations = DBModule.db_annotations + control_parameters = DBModule.db_controlParameters location = DBModule.db_location center = DBModule.db_location name = DBModule.db_name @@ -187,6 +191,14 @@ def has_annotation_with_key(self, key): return self.db_has_annotation_with_key(key) def get_annotation_by_key(self, key): return self.db_get_annotation_by_key(key) + def add_control_parameter(self, controlParameter): + self.db_add_controlParameter(controlParameter) + def delete_control_parameter(self, controlParameter): + self.db_delete_controlParameter(controlParameter) + def has_control_parameter_with_name(self, name): + return self.db_has_controlParameter_with_name(name) + def 
get_control_parameter_by_name(self, name): + return self.db_get_controlParameter_by_name(name) def toggle_breakpoint(self): self.is_breakpoint = not self.is_breakpoint def toggle_watched(self): @@ -356,13 +368,14 @@ def get_name(): if self.namespace: return self.namespace + '|' + self.name return self.name - return ("(Module '%s:%s' id=%s functions:%s port_specs:%s annotations:%s)@%X" % + return ("(Module '%s:%s' id=%s functions:%s port_specs:%s annotations:%s control_parameters:%s)@%X" % (self.package, get_name(), self.id, [str(f) for f in self.functions], [str(port_spec) for port_spec in self.db_portSpecs], [str(a) for a in self.annotations], + [str(c) for c in self.control_parameters], id(self))) def __eq__(self, other): @@ -387,12 +400,17 @@ def __eq__(self, other): return False if len(self.annotations) != len(other.annotations): return False + if len(self.control_parameters) != len(other.control_parameters): + return False for f, g in izip(self.functions, other.functions): if f != g: return False for f, g in izip(self.annotations, other.annotations): if f != g: return False + for f, g in izip(self.control_parameters, other.control_parameters): + if f != g: + return False return True def __ne__(self, other): @@ -420,10 +438,14 @@ def create_module(self, id_scope=None): functions = [ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType), name='value', parameters=params)] + control_parameters = [ModuleControlParam(id=id_scope.getNewId(ModuleControlParam.vtType), + name='combiner', + value='pairwise')] module = Module(id=id_scope.getNewId(Module.vtType), name='Float', package=basic_pkg, - functions=functions) + functions=functions, + controlParameters=control_parameters) return module def test_copy(self): diff --git a/vistrails/core/vistrail/module_control_param.py b/vistrails/core/vistrail/module_control_param.py new file mode 100644 index 000000000..87c047412 --- /dev/null +++ b/vistrails/core/vistrail/module_control_param.py @@ -0,0 +1,135 @@ 
+############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
+## +############################################################################### +from vistrails.db.domain import DBControlParameter + +import unittest +import copy + +class ModuleControlParam(DBControlParameter): + + ########################################################################## + # Constructors and copy + + def __init__(self, *args, **kwargs): + DBControlParameter.__init__(self, *args, **kwargs) + if self.id is None: + self.id = -1 + + def __copy__(self): + return ModuleControlParam.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBControlParameter.do_copy(self, new_ids, id_scope, id_remap) + cp.__class__ = ModuleControlParam + return cp + + @staticmethod + def convert(_control_parameter): + _control_parameter.__class__ = ModuleControlParam + + ########################################################################## + # Properties + + id = DBControlParameter.db_id + name = DBControlParameter.db_name + value = DBControlParameter.db_value + + ########################################################################## + # Operators + + def __str__(self): + """__str__() -> str - Returns a string representation of a ModuleControlParam + object. + + """ + rep = "<controlParameter id=%s name=%s value=%s />" + return rep % (str(self.id), str(self.name), str(self.value)) + + def __eq__(self, other): + """ __eq__(other: ModuleControlParam) -> boolean + Returns True if self and other have the same attributes. Used by == + operator. 
+ + """ + if type(self) != type(other): + return False + if self.name != other.name: + return False + if self.value != other.value: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + +################################################################################ +# Unit tests + + +class TestModuleControlParam(unittest.TestCase): + + def create_control_parameter(self, id_scope=None): + from vistrails.db.domain import IdScope + + if id_scope is None: + id_scope = IdScope() + control_parameter = ModuleControlParam(id=id_scope.getNewId(ModuleControlParam.vtType), + name='name %s', + value='some value %s') + return control_parameter + + def test_copy(self): + from vistrails.db.domain import IdScope + id_scope = IdScope() + + a1 = self.create_control_parameter(id_scope) + a2 = copy.copy(a1) + self.assertEquals(a1, a2) + self.assertEquals(a1.id, a2.id) + a3 = a1.do_copy(True, id_scope, {}) + self.assertEquals(a1, a3) + self.assertNotEquals(a1.id, a3.id) + + def test_serialization(self): + import vistrails.core.db.io + a1 = self.create_control_parameter() + xml_str = vistrails.core.db.io.serialize(a1) + a2 = vistrails.core.db.io.unserialize(xml_str, ModuleControlParam) + self.assertEquals(a1, a2) + self.assertEquals(a1.id, a2.id) + + def test_str(self): + a1 = self.create_control_parameter() + str(a1) diff --git a/vistrails/core/vistrail/operation.py b/vistrails/core/vistrail/operation.py index bcb4f7814..2cf5961b1 100644 --- a/vistrails/core/vistrail/operation.py +++ b/vistrails/core/vistrail/operation.py @@ -35,7 +35,7 @@ from vistrails.db.domain import DBAdd, DBChange, DBDelete from vistrails.db.domain import DBAnnotation, DBAbstraction, DBConnection, DBGroup, \ DBLocation, DBModule, DBFunction, DBPluginData, DBParameter, DBPort, \ - DBPortSpec + DBPortSpec, DBControlParameter from vistrails.core.vistrail.annotation import Annotation from vistrails.core.vistrail.abstraction import Abstraction @@ -43,6 +43,7 @@ from 
vistrails.core.vistrail.group import Group from vistrails.core.vistrail.location import Location from vistrails.core.vistrail.module import Module +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.module_function import ModuleFunction from vistrails.core.vistrail.module_param import ModuleParam from vistrails.core.vistrail.plugin_data import PluginData @@ -67,6 +68,7 @@ def convert_data(_data): DBPluginData.vtType: PluginData, DBPort.vtType: Port, DBPortSpec.vtType: PortSpec, + DBControlParameter.vtType: ModuleControlParam, } try: map[_data.vtType].convert(_data) @@ -387,6 +389,13 @@ def create_ops(self, id_scope=IdScope()): what=Annotation.vtType, objectId=m.id, data=annotation) + cparam = ModuleControlParam(id=id_scope.getNewId(ModuleControlParam.vtType), + name='foo', + value='bar') + add_cparam = AddOp(id=id_scope.getNewId(AddOp.vtType), + what=ModuleControlParam.vtType, + objectId=m.id, + data=cparam) return [add_op, change_op, delete_op, add_annotation] diff --git a/vistrails/core/vistrail/pipeline.py b/vistrails/core/vistrail/pipeline.py index c2e7e3825..6f2e1e568 100644 --- a/vistrails/core/vistrail/pipeline.py +++ b/vistrails/core/vistrail/pipeline.py @@ -51,6 +51,7 @@ from vistrails.core.vistrail.connection import Connection from vistrails.core.vistrail.group import Group from vistrails.core.vistrail.module import Module +from vistrails.core.vistrail.module_control_param import ModuleControlParam from vistrails.core.vistrail.module_function import ModuleFunction from vistrails.core.vistrail.module_param import ModuleParam from vistrails.core.vistrail.plugin_data import PluginData @@ -966,6 +967,8 @@ def validate(self, raise_exception=True, vistrail_vars={}): self.is_valid = False return False + self.mark_list_depth() + self.is_valid = True return True @@ -1166,7 +1169,40 @@ def check_subworkflow_versions(self): for module in self.modules.itervalues(): if module.is_valid and module.is_abstraction(): 
module.check_latest_version() - + + def mark_list_depth(self): + """mark_list_depth() -> list + + Updates list_depth variable on each module according to list depth of + connecting port specs. This decides at what list depth the module + needs to be executed. + + """ + result = [] + for module_id in self.graph.vertices_topological_sort(): + module = self.get_module_by_id(module_id) + module.list_depth = 0 + ports = [] + for module_from_id, conn_id in self.graph.edges_to(module_id): + prev_depth = self.get_module_by_id(module_from_id).list_depth + conn = self.get_connection_by_id(conn_id) + source_depth = (conn.source.spec and + conn.source.spec.depth) or 0 + dest_depth = (conn.destination.spec and + conn.destination.spec.depth) or 0 + depth = prev_depth + source_depth - dest_depth + if depth > 0: + ports.append(conn.destination.spec.name) + # if dest depth is greater the input will be wrapped in a + # list to match its depth + # if source depth is greater this module will be executed + # once for each input in the (possibly nested) list + module.list_depth = max(module.list_depth, depth) + result.append((module_id, module.list_depth)) + module.iterated_ports = ports + return result + + ########################################################################## # Debugging @@ -1315,11 +1351,18 @@ def f3(): param.strValue = '4.0' f.params.append(param) return f + def cp1(): + f = ModuleControlParam() + f.id = id_scope.getNewId(ModuleControlParam.vtType) + f.name = 'cpname1' + f.value = 'cpvalue[]' + return f m = Module() m.id = id_scope.getNewId(Module.vtType) m.name = 'PythonCalc' m.package = '%s.pythoncalc' % get_vistrails_default_pkg_prefix() m.functions.append(f1()) + m.control_parameters.append(cp1()) return m def module2(p): diff --git a/vistrails/core/vistrail/port_spec.py b/vistrails/core/vistrail/port_spec.py index a32ec63bb..53c5bddf6 100644 --- a/vistrails/core/vistrail/port_spec.py +++ b/vistrails/core/vistrail/port_spec.py @@ -120,6 +120,8 @@ def 
__init__(self, *args, **kwargs): if 'sort_key' not in kwargs: kwargs['sort_key'] = -1 + if 'depth' not in kwargs: + kwargs['depth'] = 0 if 'id' not in kwargs: kwargs['id'] = -1 if 'tooltip' in kwargs: @@ -215,6 +217,7 @@ def from_sigstring(sigstring): sort_key = DBPortSpec.db_sort_key min_conns = DBPortSpec.db_min_conns max_conns = DBPortSpec.db_max_conns + _depth = DBPortSpec.db_depth port_spec_items = DBPortSpec.db_portSpecItems items = DBPortSpec.db_portSpecItems @@ -247,6 +250,12 @@ def _get_signature(self): return signature signature = property(_get_signature) + def _get_depth(self): + return self._depth or 0 + def _set_depth(self, depth): + self._depth = depth + depth = property(_get_depth, _set_depth) + def toolTip(self): if self._tooltip is None: self.create_tooltip() @@ -393,9 +402,11 @@ def create_tooltip(self): port_string = self.type.capitalize() else: port_string = 'Invalid' - self._tooltip = "%s port %s\n%s" % (port_string, + _depth = " (depth %s)" % self.depth if self.depth else '' + self._tooltip = "%s port %s\n%s%s" % (port_string, self.name, - self._short_sigstring) + self._short_sigstring, + _depth) ########################################################################## # Operators @@ -405,9 +416,9 @@ def __str__(self): object. 
""" - rep = "" + rep = "" return rep % (str(self.id), str(self.name), - str(self.type), str(self.sigstring)) + str(self.type), str(self.sigstring), str(self.depth)) def __eq__(self, other): """ __eq__(other: PortSpec) -> boolean diff --git a/vistrails/core/vistrail/vistrail.py b/vistrails/core/vistrail/vistrail.py index b847e31b7..5c8e7dcc1 100644 --- a/vistrails/core/vistrail/vistrail.py +++ b/vistrails/core/vistrail/vistrail.py @@ -456,6 +456,8 @@ def get_pipeline_diff_with_connections(self, v1, v2): [v1 not v2 modules], [v2 not v1 modules], [parameter-changed modules (see-below)], + [controlParameter-changed modules (see-below)], + [annotation-changed modules (see-below)], [shared connections (id in v1, id in v2) ...], [shared connections [heuristic] (id in v1, id in v2)], [c1 not in v2 connections], @@ -464,6 +466,12 @@ def get_pipeline_diff_with_connections(self, v1, v2): parameter-changed modules = [((module id in v1, module id in v2), [(function in v1, function in v2)...]), ...] + controlParameter-changed modules = [((module id in v1, module id in v2), + [(cparam in v1, cparam in v2)...]), + ...] + annotation-changed modules = [((module id in v1, module id in v2), + [(annotation in v1, annotation in v2)...]), + ...] """ return vistrails.core.db.io.get_workflow_diff_with_connections((self, v1), @@ -485,11 +493,20 @@ def get_pipeline_diff(self, v1, v2): [shared modules [heuristic match] (id in v1, id in v2)], [v1 not v2 modules], [v2 not v1 modules], - [parameter-changed modules (see-below)]) + [parameter-changed modules (see-below)], + [controlParameter-changed modules (see-below)], + [annotation-changed modules (see-below)]) parameter-changed modules = [((module id in v1, module id in v2), [(function in v1, function in v2)...]), ...] + controlParameter-changed modules = [((module id in v1, module id in v2), + [(cparam in v1, cparam in v2)...]), + ...] 
+ annotation-changed modules = [((module id in v1, module id in v2), + [(annotation in v1, annotation in v2)...]), + ...] + """ return vistrails.core.db.io.get_workflow_diff((self, v1), (self, v2)) @@ -903,15 +920,18 @@ def get_description(self, version_number): added_parameters = 0 added_connections = 0 added_annotations = 0 + added_control_parameters = 0 added_ports = 0 moved_modules = 0 changed_parameters = 0 changed_annotations = 0 + changed_control_parameters = 0 deleted_modules = 0 deleted_connections = 0 deleted_parameters = 0 deleted_functions = 0 deleted_annotations = 0 + deleted_control_parameters = 0 deleted_ports = 0 for op in ops: if op.vtType == 'add': @@ -923,6 +943,8 @@ def get_description(self, version_number): added_functions+=1 elif op.what == 'parameter': added_parameters+=1 + elif op.what == 'controlParameter': + added_control_parameters+=1 elif op.what == 'annotation': added_annotations+=1 elif op.what == 'portSpec': @@ -934,6 +956,8 @@ def get_description(self, version_number): moved_modules+=1 elif op.what == 'annotation': changed_annotations+=1 + elif op.what == 'controlParameter': + changed_control_parameters+=1 elif op.vtType == 'delete': if op.what == 'module': deleted_modules+=1 @@ -945,6 +969,8 @@ def get_description(self, version_number): deleted_parameters+=1 elif op.what == 'annotation': deleted_annotations+=1 + elif op.what == 'controlParameter': + deleted_control_parameters+=1 elif op.what == 'portSpec': deleted_ports += 1 else: @@ -962,6 +988,10 @@ def get_description(self, version_number): description = "Added parameter" if added_functions > 1 or added_parameters > 1: description += "s" + elif added_control_parameters: + description = "Added control parameter" + if added_control_parameters > 1: + description += "s" elif added_annotations: description = "Added annotation" if added_annotations > 1: @@ -974,6 +1004,10 @@ def get_description(self, version_number): description = "Changed parameter" if changed_parameters > 1: 
description += "s" + elif changed_control_parameters: + description = "Changed control parameter" + if changed_control_parameters > 1: + description += "s" elif moved_modules: description = "Moved module" if moved_modules > 1: @@ -994,6 +1028,10 @@ def get_description(self, version_number): description = "Deleted parameter" if deleted_parameters > 1 or deleted_functions > 1: description += "s" + elif deleted_control_parameters: + description = "Deleted control parameter" + if deleted_control_parameters > 1: + description += "s" elif deleted_annotations: description = "Deleted annotation" if deleted_annotations > 1: @@ -1235,7 +1273,6 @@ def test_serialization(self): # FIXME add checks for equality def test1(self): - import vistrails.core.vistrail from vistrails.core.db.locator import XMLFileLocator import vistrails.core.system v = XMLFileLocator(vistrails.core.system.vistrails_root_directory() + @@ -1258,7 +1295,6 @@ def test1(self): self.fail("vistrails tree is not single rooted.") def test2(self): - import vistrails.core.vistrail from vistrails.core.db.locator import XMLFileLocator import vistrails.core.system v = XMLFileLocator(vistrails.core.system.vistrails_root_directory() + @@ -1269,6 +1305,8 @@ def test2(self): v3 = 22 v.get_pipeline_diff(v1,v2) v.get_pipeline_diff(v1,v3) + v.get_pipeline_diff_with_connections(v1,v2) + v.get_pipeline_diff_with_connections(v1,v3) def test_empty_action_chain(self): """Tests calling action chain on empty version.""" diff --git a/vistrails/db/domain/__init__.py b/vistrails/db/domain/__init__.py index 133b69efb..d3bc3632c 100644 --- a/vistrails/db/domain/__init__.py +++ b/vistrails/db/domain/__init__.py @@ -33,4 +33,4 @@ ## ############################################################################### -from vistrails.db.versions.v1_0_3.domain import * +from vistrails.db.versions.v1_0_4.domain import * diff --git a/vistrails/db/persistence/__init__.py b/vistrails/db/persistence/__init__.py index 134d6bb8c..f0ee2663e 100644 --- 
a/vistrails/db/persistence/__init__.py +++ b/vistrails/db/persistence/__init__.py @@ -33,4 +33,4 @@ ## ############################################################################### -from vistrails.db.versions.v1_0_3.persistence import * +from vistrails.db.versions.v1_0_4.persistence import * diff --git a/vistrails/db/services/vistrail.py b/vistrails/db/services/vistrail.py index 2e093c773..5b453fd48 100644 --- a/vistrails/db/services/vistrail.py +++ b/vistrails/db/services/vistrail.py @@ -44,6 +44,7 @@ import unittest import vistrails.core.system +from itertools import chain def update_id_scope(vistrail): if hasattr(vistrail, 'update_id_scope'): @@ -853,7 +854,44 @@ def heuristicModuleMatch(m1, m2): m2_functions.remove(f2) else: return 0 - if len(m1_functions) == len(m2_functions) == 0: + + m1_cparams = copy.copy(m1.db_get_controlParameters()) + m2_cparams = copy.copy(m2.db_get_controlParameters()) + if len(m1_cparams) != len(m2_cparams): + return 0 + for cp1 in m1_cparams[:]: + match = None + for cp2 in m2_cparams: + isMatch = heuristicControlParameterMatch(cp1, cp2) + if isMatch == 1: + match = cp2 + break + if match is not None: + m1_cparams.remove(cp1) + m2_cparams.remove(cp2) + else: + return 0 + + m1_annots = copy.copy(m1.db_get_annotations()) + m2_annots = copy.copy(m2.db_get_annotations()) + if len(m1_annots) != len(m2_annots): + return 0 + for a1 in m1_annots[:]: + match = None + for a2 in m2_annots: + isMatch = heuristicAnnotationMatch(a1, a2) + if isMatch == 1: + match = a2 + break + if match is not None: + m1_annots.remove(a1) + m2_annots.remove(a2) + else: + return 0 + + if len(m1_functions) == len(m2_functions) == \ + len(m1_cparams ) == len(m2_cparams ) == \ + len(m1_annots ) == len(m2_annots ) == 0: return 1 else: return 0 @@ -899,6 +937,30 @@ def heuristicParameterMatch(p1, p2): return 0 return -1 +def heuristicControlParameterMatch(cp1, cp2): + """takes two control parameters and returns 1 if exact match, + 0 if partial match (types match), -1 
if no match + + """ + if cp1.db_name == cp2.db_name: + if cp1.db_value == cp2.db_value: + return 1 + else: + return 0 + return -1 + +def heuristicAnnotationMatch(a1, a2): + """takes two annotations and returns 1 if exact match, + 0 if partial match (types match), -1 if no match + + """ + if a1.db_key == a2.db_key: + if a1.db_value == a2.db_value: + return 1 + else: + return 0 + return -1 + def heuristicConnectionMatch(c1, c2): """takes two connections and returns 1 if exact match, 0 if partial match (currently undefined), -1 if no match @@ -1016,6 +1078,155 @@ def getParamChanges(m1, m2, same_vt=True, heuristic_match=True): return paramChanges +def getCParamChanges(m1, m2, same_vt=True, heuristic_match=True): + cparamChanges = [] + # need to check to see if any children of m1 and m2 are affected + m1_cparams = m1.db_get_controlParameters() + m2_cparams = m2.db_get_controlParameters() + m1_unmatched = [] + m2_unmatched = [] + if same_vt: + for cp1 in m1_cparams: + # see if m2 has f1, too + cp2 = m2.db_get_controlParameter(cp1.db_id) + if cp2 is None: + m1_unmatched.append(cp1) + else: + # cparam is same, check if it has changed + if heuristic_match: + matchValue = heuristicControlParameterMatch(cp1, cp2) + if matchValue != 1: + cparamChanges.append(((cp1.db_name,cp1.db_value), + (cp2.db_name,cp2.db_value))) + else: + cparamChanges.append(((cp1.db_name,cp1.db_value), + (cp2.db_name,cp2.db_value))) + for cp2 in m2_cparams: + # see if m1 has f2, too + if m1.db_get_controlParameter(cp2.db_id) is None: + m2_unmatched.append(cp2) + else: + m1_unmatched.extend(m1_cparams) + m2_unmatched.extend(m2_cparams) + +# functionMatch = True +# f1_params = f1.db_get_parameters() +# f2_params = f2.db_get_parameters() +# for p1 in f1_params: +# if f2.db_get_parameter(p1.db_id) is None: +# functionMatch = False +# m1_unmatched.append(f1) +# break +# for p2 in f2_params: +# if f1.db_get_parameter(p2.db_id) is None: +# functionMatch = False +# m2_unmatched.append(f2) +# break +# if 
functionMatch: + + if len(m1_unmatched) + len(m2_unmatched) > 0: + if heuristic_match and len(m1_unmatched) > 0 and len(m2_unmatched) > 0: + # do heuristic matches + for cp1 in m1_unmatched[:]: + matched = False + matchValue = 0 + for cp2 in m2_unmatched: + matchValue = heuristicControlParameterMatch(cp1, cp2) + if matchValue == 1: + # best match so quit + matched = cp1 + break + elif matchValue == 0: + # match, but not exact so continue to look + matched = cp1 + if matched: + if matchValue != 1: + cparamChanges.append(((cp1.db_name,cp1.db_value), + (cp2.db_name,cp2.db_value))) + m1_unmatched.remove(cp1) + m2_unmatched.remove(cp2) + + for cp in m1_unmatched: + cparamChanges.append(((cp.db_name,cp.db_value), (None, None))) + for cp in m2_unmatched: + cparamChanges.append(((None, None), (cp.db_name,cp.db_value))) + return cparamChanges + +def getAnnotationChanges(m1, m2, same_vt=True, heuristic_match=True): + annotChanges = [] + # need to check to see if any children of m1 and m2 are affected + m1_annots = m1.db_get_annotations() + m2_annots = m2.db_get_annotations() + m1_unmatched = [] + m2_unmatched = [] + if same_vt: + for a1 in m1_annots: + # see if m2 has f1, too + a2 = m2.db_get_annotation(a1.db_id) + if a2 is None: + m1_unmatched.append(a1) + else: + # cparam is same, check if it has changed + if heuristic_match: + matchValue = heuristicAnnotationMatch(a1, a2) + if matchValue != 1: + annotChanges.append(((a1.db_key,a1.db_value), + (a2.db_key,a2.db_value))) + else: + annotChanges.append(((a1.db_key,a1.db_value), + (a2.db_key,a2.db_value))) + for a2 in m2_annots: + # see if m1 has f2, too + if m1.db_get_annotation(a2.db_id) is None: + m2_unmatched.append(a2) + else: + m1_unmatched.extend(m1_annots) + m2_unmatched.extend(m2_annots) + +# functionMatch = True +# f1_params = f1.db_get_parameters() +# f2_params = f2.db_get_parameters() +# for p1 in f1_params: +# if f2.db_get_parameter(p1.db_id) is None: +# functionMatch = False +# m1_unmatched.append(f1) +# break +# 
for p2 in f2_params: +# if f1.db_get_parameter(p2.db_id) is None: +# functionMatch = False +# m2_unmatched.append(f2) +# break +# if functionMatch: + + if len(m1_unmatched) + len(m2_unmatched) > 0: + if heuristic_match and len(m1_unmatched) > 0 and len(m2_unmatched) > 0: + # do heuristic matches + for a1 in m1_unmatched[:]: + matched = False + matchValue = 0 + for a2 in m2_unmatched: + matchValue = heuristicAnnotationMatch(a1, a2) + if matchValue == 1: + # best match so quit + matched = a1 + break + elif matchValue == 0: + # match, but not exact so continue to look + matched = a1 + if matched: + if matchValue != 1: + annotChanges.append(((a1.db_key,a1.db_value), + (a2.db_key,a2.db_value))) + m1_unmatched.remove(a1) + m2_unmatched.remove(a2) + + for cp in m1_unmatched: + annotChanges.append(((cp.db_key,cp.db_value), (None, None))) + for cp in m2_unmatched: + annotChanges.append(((None, None), (cp.db_key,cp.db_value))) + + return annotChanges + def getOldObjId(operation): if operation.vtType == 'change': return operation.db_oldObjId @@ -1058,6 +1269,8 @@ def getWorkflowDiffCommon(vistrail, v1, v2, heuristic_match=True): sharedModuleIds = [] sharedConnectionIds = [] sharedFunctionIds = {} + sharedCParameterIds = {} + sharedAnnotationIds = {} for op in sharedOps: if op.what == 'module' or op.what == 'abstraction' or \ op.what == 'group': @@ -1066,10 +1279,16 @@ def getWorkflowDiffCommon(vistrail, v1, v2, heuristic_match=True): sharedConnectionIds.append(getNewObjId(op)) elif op.what == 'function': sharedFunctionIds[getNewObjId(op)] = op.db_parentObjId + elif op.what == 'controlParameter': + sharedCParameterIds[getNewObjId(op)] = op.db_parentObjId + elif op.what == 'annotation': + sharedAnnotationIds[getNewObjId(op)] = op.db_parentObjId vOnlyModules = [] vOnlyConnections = [] paramChgModules = {} + cparamChgModules = {} + annotChgModules = {} for (vAdds, vDeletes, _) in vOnlyOps: moduleDeleteIds = [] connectionDeleteIds = [] @@ -1096,6 +1315,24 @@ def 
getWorkflowDiffCommon(vistrail, v1, v2, heuristic_match=True): if moduleId in sharedModuleIds: paramChgModules[moduleId] = None sharedModuleIds.remove(moduleId) + elif op.what == 'controlParameter' and \ + (op.db_parentObjType == 'module' or + op.db_parentObjType == 'abstraction' or + op.db_parentObjType == 'group') and \ + op.db_parentObjId in sharedCParameterIds and \ + op.db_parentObjId in sharedModuleIds: + # have a control parameter change + cparamChgModules[op.db_parentObjId] = None + sharedModuleIds.remove(op.db_parentObjId) + elif op.what == 'annotation' and \ + (op.db_parentObjType == 'module' or + op.db_parentObjType == 'abstraction' or + op.db_parentObjType == 'group') and \ + op.db_parentObjId in sharedAnnotationIds and \ + op.db_parentObjId in sharedModuleIds: + # have an annotation change + annotChgModules[op.db_parentObjId] = None + sharedModuleIds.remove(op.db_parentObjId) elif op.what == 'connection': connectionDeleteIds.append(getOldObjId(op)) if getOldObjId(op) in sharedConnectionIds: @@ -1122,6 +1359,24 @@ def getWorkflowDiffCommon(vistrail, v1, v2, heuristic_match=True): if moduleId in sharedModuleIds: paramChgModules[moduleId] = None sharedModuleIds.remove(moduleId) + elif (op.what == 'controlParameter' and + (op.db_parentObjType == 'module' or + op.db_parentObjType == 'abstraction' or + op.db_parentObjType == 'group') and + op.db_parentObjId in sharedCParameterIds and + op.db_parentObjId in sharedModuleIds): + # have a control parameter change + cparamChgModules[op.db_parentObjId] = None + sharedModuleIds.remove(op.db_parentObjId) + elif (op.what == 'annotation' and + (op.db_parentObjType == 'module' or + op.db_parentObjType == 'abstraction' or + op.db_parentObjType == 'group') and + op.db_parentObjId in sharedAnnotationIds and + op.db_parentObjId in sharedModuleIds): + # have an annotation change + annotChgModules[op.db_parentObjId] = None + sharedModuleIds.remove(op.db_parentObjId) elif op.what == 'connection': 
connectionAddIds.append(getOldObjId(op)) @@ -1149,22 +1404,31 @@ def getWorkflowDiffCommon(vistrail, v1, v2, heuristic_match=True): c2Only.append(id) paramChgModulePairs = [(id, id) for id in paramChgModules.keys()] - + cparamChgModulePairs = [(id, id) for id in cparamChgModules.keys()] + annotChgModulePairs = [(id, id) for id in annotChgModules.keys()] # print "^^^^ SHARED MODULE PAIRS:", sharedModulePairs + c1Only, c2Only, heuristicConnectionPairs = [], [], [] + if heuristic_match: (heuristicModulePairs, heuristicConnectionPairs, v1Only, v2Only, \ c1Only, c2Only) = do_heuristic_diff(v1Workflow, v2Workflow, \ v1Only, v2Only, \ c1Only, c2Only) paramChgModulePairs.extend(heuristicModulePairs) - - (heuristicModulePairs, paramChanges) = \ - check_params_diff(v1Workflow, v2Workflow, paramChgModulePairs, + cparamChgModulePairs.extend(heuristicModulePairs) + annotChgModulePairs.extend(heuristicModulePairs) + allChgModulePairs = list(set(chain(paramChgModulePairs, + cparamChgModulePairs, + annotChgModulePairs))) + + (heuristicModulePairs, paramChanges, cparam_changes, annot_changes) = \ + check_params_diff(v1Workflow, v2Workflow, allChgModulePairs, True, heuristic_match) return (v1Workflow, v2Workflow, sharedModulePairs, heuristicModulePairs, v1Only, v2Only, - paramChanges, sharedConnectionPairs, heuristicConnectionPairs, + paramChanges, cparam_changes, annot_changes, + sharedConnectionPairs, heuristicConnectionPairs, c1Only, c2Only) def do_heuristic_diff(v1Workflow, v2Workflow, v1_modules, v2_modules, @@ -1172,7 +1436,6 @@ def do_heuristic_diff(v1Workflow, v2Workflow, v1_modules, v2_modules, # add heuristic matches heuristicModulePairs = [] heuristicConnectionPairs = [] - paramChgModulePairs = [] v1Only = copy.copy(v1_modules) v2Only = copy.copy(v2_modules) @@ -1205,7 +1468,6 @@ def do_heuristic_diff(v1Workflow, v2Workflow, v1_modules, v2_modules, v2Only.remove(match[1]) # we now check all heuristic pairs for parameter changes heuristicModulePairs.append(match) - # 
paramChgModulePairs.append(match) # match connections for c1_id in c1Only[:]: @@ -1232,6 +1494,8 @@ def check_params_diff(v1Workflow, v2Workflow, paramChgModulePairs, same_vt=True, heuristic_match=True): matched = [] paramChanges = [] + cparamChanges = [] + annotChanges = [] # print "^^^^ PARAM CHG PAIRS:", paramChgModulePairs for (m1_id, m2_id) in paramChgModulePairs: m1 = v1Workflow.db_get_module(m1_id) @@ -1239,11 +1503,19 @@ def check_params_diff(v1Workflow, v2Workflow, paramChgModulePairs, moduleParamChanges = getParamChanges(m1, m2, same_vt, heuristic_match) if len(moduleParamChanges) > 0: paramChanges.append(((m1_id, m2_id), moduleParamChanges)) - else: + moduleCParamChanges = getCParamChanges(m1, m2, same_vt, + heuristic_match) + if len(moduleCParamChanges) > 0: + cparamChanges.append(((m1_id, m2_id), moduleCParamChanges)) + moduleAnnotChanges = getAnnotationChanges(m1, m2, same_vt, + heuristic_match) + if len(moduleAnnotChanges) > 0: + annotChanges.append(((m1_id, m2_id), moduleAnnotChanges)) + if len(moduleParamChanges) == len(moduleCParamChanges) == \ + len(moduleAnnotChanges) == 0: # heuristicModulePairs.append((m1_id, m2_id)) matched.append((m1_id, m2_id)) - - return (matched, paramChanges) + return (matched, paramChanges, cparamChanges, annotChanges) def getWorkflowDiff(vt_pair_1, vt_pair_2, heuristic_match=True): (vistrail_1, v_1) = vt_pair_1 @@ -1263,13 +1535,14 @@ def getWorkflowDiff(vt_pair_1, vt_pair_2, heuristic_match=True): (m_matches, c_matches, modules_1, modules_2, conns_1, conns_2) = \ do_heuristic_diff(workflow_1, workflow_2, modules_1, modules_2, \ conns_1, conns_2) - (m_matches, param_changes) = check_params_diff(workflow_1, workflow_2, + (m_matches, param_changes, cparam_changes, annot_changes) = \ + check_params_diff(workflow_1, workflow_2, m_matches, False, heuristic_match) return (workflow_1, workflow_2, [], m_matches, modules_1, modules_2, - param_changes, [], c_matches, conns_1, conns_2) + param_changes, cparam_changes, 
annot_changes, [], c_matches, conns_1, conns_2) - return (workflow_1, workflow_2, [], [], modules_1, modules_2, [], [], [], + return (workflow_1, workflow_2, [], [], modules_1, modules_2, [], [], [], [], [], conns_1, conns_2) ################################################################################ diff --git a/vistrails/db/specs/all.xml b/vistrails/db/specs/all.xml index e63a8bee1..2a395b82f 100644 --- a/vistrails/db/specs/all.xml +++ b/vistrails/db/specs/all.xml @@ -92,7 +92,12 @@ index="key"> - + + + + + @@ -235,6 +240,9 @@ + + + @@ -423,6 +431,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -528,6 +603,9 @@ + + + @@ -790,7 +868,12 @@ index="key"> - + + + + + @@ -1034,6 +1117,11 @@ + + + + @@ -1670,6 +1758,11 @@ + + + + + @@ -2064,6 +2157,11 @@ + + + + diff --git a/vistrails/db/versions/__init__.py b/vistrails/db/versions/__init__.py index ad963647e..4e2387461 100644 --- a/vistrails/db/versions/__init__.py +++ b/vistrails/db/versions/__init__.py @@ -38,7 +38,7 @@ from vistrails.core.system import vistrails_root_directory from vistrails.db import VistrailsDBException -currentVersion = '1.0.3' +currentVersion = '1.0.4' def getVersionDAO(version=None): if version is None: @@ -80,9 +80,11 @@ def translate_object(obj, method_name, version=None, target_version=None): '1.0.0': '1.0.1', '1.0.1': '1.0.2', '1.0.2': '1.0.3', + '1.0.3': '1.0.4', } rev_version_map = { + '1.0.4': '1.0.3', '1.0.3': '1.0.2', '1.0.2': '1.0.1', '1.0.1': '1.0.0', diff --git a/vistrails/db/versions/v1_0_3/translate/v1_0_4.py b/vistrails/db/versions/v1_0_3/translate/v1_0_4.py new file mode 100644 index 000000000..46afd6b6e --- /dev/null +++ b/vistrails/db/versions/v1_0_3/translate/v1_0_4.py @@ -0,0 +1,101 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. 
+## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
+## +############################################################################### +from vistrails.db.versions.v1_0_3.domain import DBVistrail, DBAnnotation, \ + DBWorkflow, DBLog, DBRegistry, \ + DBPortSpec, DBAdd, DBChange, DBDelete +from vistrails.core import debug +from vistrails.core.system import get_elementtree_library +ElementTree = get_elementtree_library() + +id_scope = None + +def translateVistrail(_vistrail): + """ Translate new DBVistrailVariable based vistrail variables to old + annotation based type """ + global id_scope + + def update_workflow(old_obj, trans_dict): + return DBWorkflow.update_version(old_obj.db_workflow, + trans_dict, DBWorkflow()) + + def update_operations(old_obj, trans_dict): + new_ops = [] + for obj in old_obj.db_operations: + if obj.vtType == 'delete': + new_ops.append(DBDelete.update_version(obj, trans_dict)) + elif obj.vtType == 'add': + new_op = DBAdd.update_version(obj, trans_dict) + new_ops.append(new_op) + elif obj.vtType == 'change': + new_op = DBChange.update_version(obj, trans_dict) + new_ops.append(new_op) + return new_ops + + vistrail = DBVistrail() + id_scope = vistrail.idScope + + translate_dict = {'DBAction': {'operations': update_operations}, + 'DBGroup': {'workflow': update_workflow}, + 'DBVistrail': {'annotations': update_annotations}, + } + + vistrail = DBVistrail.update_version(_vistrail, translate_dict, vistrail) + + vistrail.db_version = '1.0.3' + return vistrail + +def translateWorkflow(_workflow): + def update_workflow(old_obj, translate_dict): + return DBWorkflow.update_version(old_obj.db_workflow, translate_dict) + translate_dict = {'DBGroup': {'workflow': update_workflow}} + workflow = DBWorkflow.update_version(_workflow, translate_dict) + + workflow.db_version = '1.0.3' + return workflow + +def translateLog(_log): + translate_dict = {} + log = DBLog.update_version(_log, translate_dict) + log.db_version = '1.0.3' + return log + +def translateRegistry(_registry): + global id_scope + translate_dict = {} 
+ registry = DBRegistry() + id_scope = registry.idScope + vistrail = DBRegistry.update_version(_registry, translate_dict, registry) + registry.db_version = '1.0.3' + return registry diff --git a/vistrails/db/versions/v1_0_4/__init__.py b/vistrails/db/versions/v1_0_4/__init__.py new file mode 100644 index 000000000..7979ce21d --- /dev/null +++ b/vistrails/db/versions/v1_0_4/__init__.py @@ -0,0 +1,36 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +## +############################################################################### + +version = '1.0.4' diff --git a/vistrails/db/versions/v1_0_4/domain/__init__.py b/vistrails/db/versions/v1_0_4/domain/__init__.py new file mode 100644 index 000000000..a8208f600 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/domain/__init__.py @@ -0,0 +1,41 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. 
+## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +## +############################################################################### + +from auto_gen import * +from registry import DBRegistry +from workflow import DBWorkflow +from vistrail import DBVistrail +from log import DBLog +from id_scope import IdScope diff --git a/vistrails/db/versions/v1_0_4/domain/auto_gen.py b/vistrails/db/versions/v1_0_4/domain/auto_gen.py new file mode 100644 index 000000000..28bf7d553 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/domain/auto_gen.py @@ -0,0 +1,17613 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. 
+## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
+## +############################################################################### + +"""generated automatically by auto_dao.py""" + +import copy + +class DBOpmProcessIdEffect(object): + + vtType = 'opm_process_id_effect' + + def __init__(self, id=None): + self._db_id = id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmProcessIdEffect.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmProcessIdEffect(id=self._db_id) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap: + cp._db_id = id_remap[('opm_process', self._db_id)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmProcessIdEffect() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + 
self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + + +class DBVistrailVariable(object): + + vtType = 'vistrailVariable' + + def __init__(self, name=None, uuid=None, package=None, module=None, namespace=None, value=None): + self._db_name = name + self._db_uuid = uuid + self._db_package = package + self._db_module = module + self._db_namespace = namespace + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBVistrailVariable.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBVistrailVariable(name=self._db_name, + uuid=self._db_uuid, + package=self._db_package, + module=self._db_module, + namespace=self._db_namespace, + value=self._db_value) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBVistrailVariable() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'uuid' in class_dict: + res = class_dict['uuid'](old_obj, trans_dict) + new_obj.db_uuid = res + elif hasattr(old_obj, 'db_uuid') and old_obj.db_uuid is not None: + new_obj.db_uuid = old_obj.db_uuid + if 'package' in class_dict: + res = class_dict['package'](old_obj, trans_dict) + new_obj.db_package = res + elif hasattr(old_obj, 'db_package') and 
old_obj.db_package is not None: + new_obj.db_package = old_obj.db_package + if 'module' in class_dict: + res = class_dict['module'](old_obj, trans_dict) + new_obj.db_module = res + elif hasattr(old_obj, 'db_module') and old_obj.db_module is not None: + new_obj.db_module = old_obj.db_module + if 'namespace' in class_dict: + res = class_dict['namespace'](old_obj, trans_dict) + new_obj.db_namespace = res + elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None: + new_obj.db_namespace = old_obj.db_namespace + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_uuid(self): + return self._db_uuid + def __set_db_uuid(self, uuid): + self._db_uuid = uuid + self.is_dirty = True + db_uuid = property(__get_db_uuid, __set_db_uuid) + def db_add_uuid(self, uuid): + self._db_uuid = uuid + def db_change_uuid(self, uuid): + self._db_uuid = uuid + def db_delete_uuid(self, uuid): + self._db_uuid = None + + def __get_db_package(self): + return self._db_package + def __set_db_package(self, package): + self._db_package = package + self.is_dirty = True + db_package = property(__get_db_package, 
__set_db_package) + def db_add_package(self, package): + self._db_package = package + def db_change_package(self, package): + self._db_package = package + def db_delete_package(self, package): + self._db_package = None + + def __get_db_module(self): + return self._db_module + def __set_db_module(self, module): + self._db_module = module + self.is_dirty = True + db_module = property(__get_db_module, __set_db_module) + def db_add_module(self, module): + self._db_module = module + def db_change_module(self, module): + self._db_module = module + def db_delete_module(self, module): + self._db_module = None + + def __get_db_namespace(self): + return self._db_namespace + def __set_db_namespace(self, namespace): + self._db_namespace = namespace + self.is_dirty = True + db_namespace = property(__get_db_namespace, __set_db_namespace) + def db_add_namespace(self, namespace): + self._db_namespace = namespace + def db_change_namespace(self, namespace): + self._db_namespace = namespace + def db_delete_namespace(self, namespace): + self._db_namespace = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + def getPrimaryKey(self): + return self._db_name + +class DBProvAgent(object): + + vtType = 'prov_agent' + + def __init__(self, id=None, vt_id=None, prov_type=None, prov_label=None, vt_machine_os=None, vt_machine_architecture=None, vt_machine_processor=None, vt_machine_ram=None): + self._db_id = id + self._db_vt_id = vt_id + self._db_prov_type = prov_type + self._db_prov_label = prov_label + self._db_vt_machine_os = vt_machine_os + self._db_vt_machine_architecture = vt_machine_architecture + self._db_vt_machine_processor = vt_machine_processor + 
self._db_vt_machine_ram = vt_machine_ram + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBProvAgent.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBProvAgent(id=self._db_id, + vt_id=self._db_vt_id, + prov_type=self._db_prov_type, + prov_label=self._db_prov_label, + vt_machine_os=self._db_vt_machine_os, + vt_machine_architecture=self._db_vt_machine_architecture, + vt_machine_processor=self._db_vt_machine_processor, + vt_machine_ram=self._db_vt_machine_ram) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBProvAgent() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'vt_id' in class_dict: + res = class_dict['vt_id'](old_obj, trans_dict) + new_obj.db_vt_id = res + elif hasattr(old_obj, 'db_vt_id') and old_obj.db_vt_id is not None: + new_obj.db_vt_id = old_obj.db_vt_id + if 'prov_type' in class_dict: + res = class_dict['prov_type'](old_obj, trans_dict) + new_obj.db_prov_type = res + elif hasattr(old_obj, 'db_prov_type') and old_obj.db_prov_type is not None: + new_obj.db_prov_type = old_obj.db_prov_type + if 'prov_label' in class_dict: + res = class_dict['prov_label'](old_obj, trans_dict) + new_obj.db_prov_label = res + elif hasattr(old_obj, 'db_prov_label') and old_obj.db_prov_label is not None: + 
new_obj.db_prov_label = old_obj.db_prov_label + if 'vt_machine_os' in class_dict: + res = class_dict['vt_machine_os'](old_obj, trans_dict) + new_obj.db_vt_machine_os = res + elif hasattr(old_obj, 'db_vt_machine_os') and old_obj.db_vt_machine_os is not None: + new_obj.db_vt_machine_os = old_obj.db_vt_machine_os + if 'vt_machine_architecture' in class_dict: + res = class_dict['vt_machine_architecture'](old_obj, trans_dict) + new_obj.db_vt_machine_architecture = res + elif hasattr(old_obj, 'db_vt_machine_architecture') and old_obj.db_vt_machine_architecture is not None: + new_obj.db_vt_machine_architecture = old_obj.db_vt_machine_architecture + if 'vt_machine_processor' in class_dict: + res = class_dict['vt_machine_processor'](old_obj, trans_dict) + new_obj.db_vt_machine_processor = res + elif hasattr(old_obj, 'db_vt_machine_processor') and old_obj.db_vt_machine_processor is not None: + new_obj.db_vt_machine_processor = old_obj.db_vt_machine_processor + if 'vt_machine_ram' in class_dict: + res = class_dict['vt_machine_ram'](old_obj, trans_dict) + new_obj.db_vt_machine_ram = res + elif hasattr(old_obj, 'db_vt_machine_ram') and old_obj.db_vt_machine_ram is not None: + new_obj.db_vt_machine_ram = old_obj.db_vt_machine_ram + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_vt_id(self): + return self._db_vt_id + def __set_db_vt_id(self, vt_id): + 
self._db_vt_id = vt_id + self.is_dirty = True + db_vt_id = property(__get_db_vt_id, __set_db_vt_id) + def db_add_vt_id(self, vt_id): + self._db_vt_id = vt_id + def db_change_vt_id(self, vt_id): + self._db_vt_id = vt_id + def db_delete_vt_id(self, vt_id): + self._db_vt_id = None + + def __get_db_prov_type(self): + return self._db_prov_type + def __set_db_prov_type(self, prov_type): + self._db_prov_type = prov_type + self.is_dirty = True + db_prov_type = property(__get_db_prov_type, __set_db_prov_type) + def db_add_prov_type(self, prov_type): + self._db_prov_type = prov_type + def db_change_prov_type(self, prov_type): + self._db_prov_type = prov_type + def db_delete_prov_type(self, prov_type): + self._db_prov_type = None + + def __get_db_prov_label(self): + return self._db_prov_label + def __set_db_prov_label(self, prov_label): + self._db_prov_label = prov_label + self.is_dirty = True + db_prov_label = property(__get_db_prov_label, __set_db_prov_label) + def db_add_prov_label(self, prov_label): + self._db_prov_label = prov_label + def db_change_prov_label(self, prov_label): + self._db_prov_label = prov_label + def db_delete_prov_label(self, prov_label): + self._db_prov_label = None + + def __get_db_vt_machine_os(self): + return self._db_vt_machine_os + def __set_db_vt_machine_os(self, vt_machine_os): + self._db_vt_machine_os = vt_machine_os + self.is_dirty = True + db_vt_machine_os = property(__get_db_vt_machine_os, __set_db_vt_machine_os) + def db_add_vt_machine_os(self, vt_machine_os): + self._db_vt_machine_os = vt_machine_os + def db_change_vt_machine_os(self, vt_machine_os): + self._db_vt_machine_os = vt_machine_os + def db_delete_vt_machine_os(self, vt_machine_os): + self._db_vt_machine_os = None + + def __get_db_vt_machine_architecture(self): + return self._db_vt_machine_architecture + def __set_db_vt_machine_architecture(self, vt_machine_architecture): + self._db_vt_machine_architecture = vt_machine_architecture + self.is_dirty = True + 
db_vt_machine_architecture = property(__get_db_vt_machine_architecture, __set_db_vt_machine_architecture) + def db_add_vt_machine_architecture(self, vt_machine_architecture): + self._db_vt_machine_architecture = vt_machine_architecture + def db_change_vt_machine_architecture(self, vt_machine_architecture): + self._db_vt_machine_architecture = vt_machine_architecture + def db_delete_vt_machine_architecture(self, vt_machine_architecture): + self._db_vt_machine_architecture = None + + def __get_db_vt_machine_processor(self): + return self._db_vt_machine_processor + def __set_db_vt_machine_processor(self, vt_machine_processor): + self._db_vt_machine_processor = vt_machine_processor + self.is_dirty = True + db_vt_machine_processor = property(__get_db_vt_machine_processor, __set_db_vt_machine_processor) + def db_add_vt_machine_processor(self, vt_machine_processor): + self._db_vt_machine_processor = vt_machine_processor + def db_change_vt_machine_processor(self, vt_machine_processor): + self._db_vt_machine_processor = vt_machine_processor + def db_delete_vt_machine_processor(self, vt_machine_processor): + self._db_vt_machine_processor = None + + def __get_db_vt_machine_ram(self): + return self._db_vt_machine_ram + def __set_db_vt_machine_ram(self, vt_machine_ram): + self._db_vt_machine_ram = vt_machine_ram + self.is_dirty = True + db_vt_machine_ram = property(__get_db_vt_machine_ram, __set_db_vt_machine_ram) + def db_add_vt_machine_ram(self, vt_machine_ram): + self._db_vt_machine_ram = vt_machine_ram + def db_change_vt_machine_ram(self, vt_machine_ram): + self._db_vt_machine_ram = vt_machine_ram + def db_delete_vt_machine_ram(self, vt_machine_ram): + self._db_vt_machine_ram = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmWasGeneratedBy(object): + + vtType = 'opm_was_generated_by' + + def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None): + self.db_deleted_effect = [] + self._db_effect = effect + self.db_deleted_role = 
[] + self._db_role = role + self.db_deleted_cause = [] + self._db_cause = cause + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.db_deleted_opm_times = [] + if opm_times is None: + self._db_opm_times = [] + else: + self._db_opm_times = opm_times + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmWasGeneratedBy.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmWasGeneratedBy() + if self._db_effect is not None: + cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap) + if self._db_role is not None: + cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap) + if self._db_cause is not None: + cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + if self._db_opm_times is None: + cp._db_opm_times = [] + else: + cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmWasGeneratedBy() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'effect' in class_dict: + res = class_dict['effect'](old_obj, trans_dict) + new_obj.db_effect = res + elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None: + obj = old_obj.db_effect + 
new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'): + for obj in old_obj.db_deleted_effect: + n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict) + new_obj.db_deleted_effect.append(n_obj) + if 'role' in class_dict: + res = class_dict['role'](old_obj, trans_dict) + new_obj.db_role = res + elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None: + obj = old_obj.db_role + new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'): + for obj in old_obj.db_deleted_role: + n_obj = DBOpmRole.update_version(obj, trans_dict) + new_obj.db_deleted_role.append(n_obj) + if 'cause' in class_dict: + res = class_dict['cause'](old_obj, trans_dict) + new_obj.db_cause = res + elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None: + obj = old_obj.db_cause + new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'): + for obj in old_obj.db_deleted_cause: + n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict) + new_obj.db_deleted_cause.append(n_obj) + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'opm_times' in class_dict: + res = class_dict['opm_times'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_opm_time(obj) + elif hasattr(old_obj, 'db_opm_times') 
and old_obj.db_opm_times is not None: + for obj in old_obj.db_opm_times: + new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'): + for obj in old_obj.db_deleted_opm_times: + n_obj = DBOpmTime.update_version(obj, trans_dict) + new_obj.db_deleted_opm_times.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_effect is not None: + children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_effect = None + if self._db_role is not None: + children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_role = None + if self._db_cause is not None: + children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_cause = None + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + to_del = [] + for child in self.db_opm_times: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_opm_time(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_effect) + children.extend(self.db_deleted_role) + children.extend(self.db_deleted_cause) + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_opm_times) + if remove: + self.db_deleted_effect = [] + self.db_deleted_role = [] + self.db_deleted_cause = [] + self.db_deleted_accounts = [] + self.db_deleted_opm_times = [] 
+ return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_effect is not None and self._db_effect.has_changes(): + return True + if self._db_role is not None and self._db_role.has_changes(): + return True + if self._db_cause is not None and self._db_cause.has_changes(): + return True + for child in self._db_accounts: + if child.has_changes(): + return True + for child in self._db_opm_times: + if child.has_changes(): + return True + return False + def __get_db_effect(self): + return self._db_effect + def __set_db_effect(self, effect): + self._db_effect = effect + self.is_dirty = True + db_effect = property(__get_db_effect, __set_db_effect) + def db_add_effect(self, effect): + self._db_effect = effect + def db_change_effect(self, effect): + self._db_effect = effect + def db_delete_effect(self, effect): + if not self.is_new: + self.db_deleted_effect.append(self._db_effect) + self._db_effect = None + + def __get_db_role(self): + return self._db_role + def __set_db_role(self, role): + self._db_role = role + self.is_dirty = True + db_role = property(__get_db_role, __set_db_role) + def db_add_role(self, role): + self._db_role = role + def db_change_role(self, role): + self._db_role = role + def db_delete_role(self, role): + if not self.is_new: + self.db_deleted_role.append(self._db_role) + self._db_role = None + + def __get_db_cause(self): + return self._db_cause + def __set_db_cause(self, cause): + self._db_cause = cause + self.is_dirty = True + db_cause = property(__get_db_cause, __set_db_cause) + def db_add_cause(self, cause): + self._db_cause = cause + def db_change_cause(self, cause): + self._db_cause = cause + def db_delete_cause(self, cause): + if not self.is_new: + self.db_deleted_cause.append(self._db_cause) + self._db_cause = None + + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, 
__set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def __get_db_opm_times(self): + return self._db_opm_times + def __set_db_opm_times(self, opm_times): + self._db_opm_times = opm_times + self.is_dirty = True + db_opm_times = property(__get_db_opm_times, __set_db_opm_times) + def db_get_opm_times(self): + return self._db_opm_times + def db_add_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_change_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_delete_opm_time(self, opm_time): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_opm_time(self, key): + return None + + + +class DBOpmAccounts(object): + + vtType = 'opm_accounts' + + def __init__(self, accounts=None, opm_overlapss=None): + self.db_deleted_accounts = [] + self.db_accounts_id_index = {} + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + for v in self._db_accounts: + self.db_accounts_id_index[v.db_id] = v + self.db_deleted_opm_overlapss = [] + if opm_overlapss is None: + self._db_opm_overlapss = [] + else: + self._db_opm_overlapss = opm_overlapss + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmAccounts.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmAccounts() + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + if self._db_opm_overlapss is None: + cp._db_opm_overlapss = [] + else: 
+ cp._db_opm_overlapss = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_overlapss] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_accounts_id_index = dict((v.db_id, v) for v in cp._db_accounts) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmAccounts() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccount.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccount.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'opm_overlapss' in class_dict: + res = class_dict['opm_overlapss'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_opm_overlaps(obj) + elif hasattr(old_obj, 'db_opm_overlapss') and old_obj.db_opm_overlapss is not None: + for obj in old_obj.db_opm_overlapss: + new_obj.db_add_opm_overlaps(DBOpmOverlaps.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_opm_overlapss') and hasattr(new_obj, 'db_deleted_opm_overlapss'): + for obj in old_obj.db_deleted_opm_overlapss: + n_obj = DBOpmOverlaps.update_version(obj, trans_dict) + new_obj.db_deleted_opm_overlapss.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty 
+ return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + to_del = [] + for child in self.db_opm_overlapss: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_opm_overlaps(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_opm_overlapss) + if remove: + self.db_deleted_accounts = [] + self.db_deleted_opm_overlapss = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_accounts: + if child.has_changes(): + return True + for child in self._db_opm_overlapss: + if child.has_changes(): + return True + return False + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + self.db_accounts_id_index[account.db_id] = account + def db_change_account(self, account): + self.is_dirty = True + found = False + for i in xrange(len(self._db_accounts)): + if self._db_accounts[i].db_id == account.db_id: + self._db_accounts[i] = account + found = True + break + if not found: + self._db_accounts.append(account) + self.db_accounts_id_index[account.db_id] = account + def db_delete_account(self, account): + self.is_dirty = True + for i in xrange(len(self._db_accounts)): + if 
self._db_accounts[i].db_id == account.db_id: + if not self._db_accounts[i].is_new: + self.db_deleted_accounts.append(self._db_accounts[i]) + del self._db_accounts[i] + break + del self.db_accounts_id_index[account.db_id] + def db_get_account(self, key): + for i in xrange(len(self._db_accounts)): + if self._db_accounts[i].db_id == key: + return self._db_accounts[i] + return None + def db_get_account_by_id(self, key): + return self.db_accounts_id_index[key] + def db_has_account_with_id(self, key): + return key in self.db_accounts_id_index + + def __get_db_opm_overlapss(self): + return self._db_opm_overlapss + def __set_db_opm_overlapss(self, opm_overlapss): + self._db_opm_overlapss = opm_overlapss + self.is_dirty = True + db_opm_overlapss = property(__get_db_opm_overlapss, __set_db_opm_overlapss) + def db_get_opm_overlapss(self): + return self._db_opm_overlapss + def db_add_opm_overlaps(self, opm_overlaps): + self.is_dirty = True + self._db_opm_overlapss.append(opm_overlaps) + def db_change_opm_overlaps(self, opm_overlaps): + self.is_dirty = True + self._db_opm_overlapss.append(opm_overlaps) + def db_delete_opm_overlaps(self, opm_overlaps): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_opm_overlaps(self, key): + return None + + + +class DBRefProvAgent(object): + + vtType = 'ref_prov_agent' + + def __init__(self, prov_ref=None): + self._db_prov_ref = prov_ref + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBRefProvAgent.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBRefProvAgent(prov_ref=self._db_prov_ref) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_prov_ref') and ('prov_agent', self._db_prov_ref) in id_remap: + cp._db_prov_ref = 
id_remap[('prov_agent', self._db_prov_ref)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBRefProvAgent() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_ref' in class_dict: + res = class_dict['prov_ref'](old_obj, trans_dict) + new_obj.db_prov_ref = res + elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None: + new_obj.db_prov_ref = old_obj.db_prov_ref + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_prov_ref(self): + return self._db_prov_ref + def __set_db_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + self.is_dirty = True + db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref) + def db_add_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + def db_change_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + def db_delete_prov_ref(self, prov_ref): + self._db_prov_ref = None + + + +class DBPortSpec(object): + + vtType = 'portSpec' + + def __init__(self, id=None, name=None, type=None, optional=None, depth=None, sort_key=None, portSpecItems=None, min_conns=None, max_conns=None): + self._db_id = id + self._db_name = name + self._db_type = type + self._db_optional = optional + self._db_depth = depth + self._db_sort_key = sort_key + self.db_deleted_portSpecItems = [] + self.db_portSpecItems_id_index = {} + if portSpecItems is None: + self._db_portSpecItems = [] + else: + self._db_portSpecItems = portSpecItems + for v in 
self._db_portSpecItems: + self.db_portSpecItems_id_index[v.db_id] = v + self._db_min_conns = min_conns + self._db_max_conns = max_conns + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBPortSpec.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBPortSpec(id=self._db_id, + name=self._db_name, + type=self._db_type, + optional=self._db_optional, + depth=self._db_depth, + sort_key=self._db_sort_key, + min_conns=self._db_min_conns, + max_conns=self._db_max_conns) + if self._db_portSpecItems is None: + cp._db_portSpecItems = [] + else: + cp._db_portSpecItems = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecItems] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_portSpecItems_id_index = dict((v.db_id, v) for v in cp._db_portSpecItems) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBPortSpec() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'type' in class_dict: + res = class_dict['type'](old_obj, trans_dict) + new_obj.db_type = res + elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None: + new_obj.db_type = old_obj.db_type + if 
'optional' in class_dict: + res = class_dict['optional'](old_obj, trans_dict) + new_obj.db_optional = res + elif hasattr(old_obj, 'db_optional') and old_obj.db_optional is not None: + new_obj.db_optional = old_obj.db_optional + if 'depth' in class_dict: + res = class_dict['depth'](old_obj, trans_dict) + new_obj.db_depth = res + elif hasattr(old_obj, 'db_depth') and old_obj.db_depth is not None: + new_obj.db_depth = old_obj.db_depth + if 'sort_key' in class_dict: + res = class_dict['sort_key'](old_obj, trans_dict) + new_obj.db_sort_key = res + elif hasattr(old_obj, 'db_sort_key') and old_obj.db_sort_key is not None: + new_obj.db_sort_key = old_obj.db_sort_key + if 'portSpecItems' in class_dict: + res = class_dict['portSpecItems'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_portSpecItem(obj) + elif hasattr(old_obj, 'db_portSpecItems') and old_obj.db_portSpecItems is not None: + for obj in old_obj.db_portSpecItems: + new_obj.db_add_portSpecItem(DBPortSpecItem.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_portSpecItems') and hasattr(new_obj, 'db_deleted_portSpecItems'): + for obj in old_obj.db_deleted_portSpecItems: + n_obj = DBPortSpecItem.update_version(obj, trans_dict) + new_obj.db_deleted_portSpecItems.append(n_obj) + if 'min_conns' in class_dict: + res = class_dict['min_conns'](old_obj, trans_dict) + new_obj.db_min_conns = res + elif hasattr(old_obj, 'db_min_conns') and old_obj.db_min_conns is not None: + new_obj.db_min_conns = old_obj.db_min_conns + if 'max_conns' in class_dict: + res = class_dict['max_conns'](old_obj, trans_dict) + new_obj.db_max_conns = res + elif hasattr(old_obj, 'db_max_conns') and old_obj.db_max_conns is not None: + new_obj.db_max_conns = old_obj.db_max_conns + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if not for_action: + for child in self.db_portSpecItems: + 
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_portSpecItems) + if remove: + self.db_deleted_portSpecItems = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_portSpecItems: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_type(self): + return self._db_type + def __set_db_type(self, type): + self._db_type = type + self.is_dirty = True + db_type = property(__get_db_type, __set_db_type) + def db_add_type(self, type): + self._db_type = type + def db_change_type(self, type): + self._db_type = type + def db_delete_type(self, type): + self._db_type = None + + def __get_db_optional(self): + return self._db_optional + def __set_db_optional(self, optional): + self._db_optional = optional + self.is_dirty = True + db_optional = property(__get_db_optional, __set_db_optional) + def db_add_optional(self, optional): + self._db_optional = optional + def db_change_optional(self, optional): + self._db_optional = optional + def db_delete_optional(self, optional): + self._db_optional = None + + def __get_db_depth(self): + return self._db_depth + def __set_db_depth(self, 
depth): + self._db_depth = depth + self.is_dirty = True + db_depth = property(__get_db_depth, __set_db_depth) + def db_add_depth(self, depth): + self._db_depth = depth + def db_change_depth(self, depth): + self._db_depth = depth + def db_delete_depth(self, depth): + self._db_depth = None + + def __get_db_sort_key(self): + return self._db_sort_key + def __set_db_sort_key(self, sort_key): + self._db_sort_key = sort_key + self.is_dirty = True + db_sort_key = property(__get_db_sort_key, __set_db_sort_key) + def db_add_sort_key(self, sort_key): + self._db_sort_key = sort_key + def db_change_sort_key(self, sort_key): + self._db_sort_key = sort_key + def db_delete_sort_key(self, sort_key): + self._db_sort_key = None + + def __get_db_portSpecItems(self): + return self._db_portSpecItems + def __set_db_portSpecItems(self, portSpecItems): + self._db_portSpecItems = portSpecItems + self.is_dirty = True + db_portSpecItems = property(__get_db_portSpecItems, __set_db_portSpecItems) + def db_get_portSpecItems(self): + return self._db_portSpecItems + def db_add_portSpecItem(self, portSpecItem): + self.is_dirty = True + self._db_portSpecItems.append(portSpecItem) + self.db_portSpecItems_id_index[portSpecItem.db_id] = portSpecItem + def db_change_portSpecItem(self, portSpecItem): + self.is_dirty = True + found = False + for i in xrange(len(self._db_portSpecItems)): + if self._db_portSpecItems[i].db_id == portSpecItem.db_id: + self._db_portSpecItems[i] = portSpecItem + found = True + break + if not found: + self._db_portSpecItems.append(portSpecItem) + self.db_portSpecItems_id_index[portSpecItem.db_id] = portSpecItem + def db_delete_portSpecItem(self, portSpecItem): + self.is_dirty = True + for i in xrange(len(self._db_portSpecItems)): + if self._db_portSpecItems[i].db_id == portSpecItem.db_id: + if not self._db_portSpecItems[i].is_new: + self.db_deleted_portSpecItems.append(self._db_portSpecItems[i]) + del self._db_portSpecItems[i] + break + del 
self.db_portSpecItems_id_index[portSpecItem.db_id] + def db_get_portSpecItem(self, key): + for i in xrange(len(self._db_portSpecItems)): + if self._db_portSpecItems[i].db_id == key: + return self._db_portSpecItems[i] + return None + def db_get_portSpecItem_by_id(self, key): + return self.db_portSpecItems_id_index[key] + def db_has_portSpecItem_with_id(self, key): + return key in self.db_portSpecItems_id_index + + def __get_db_min_conns(self): + return self._db_min_conns + def __set_db_min_conns(self, min_conns): + self._db_min_conns = min_conns + self.is_dirty = True + db_min_conns = property(__get_db_min_conns, __set_db_min_conns) + def db_add_min_conns(self, min_conns): + self._db_min_conns = min_conns + def db_change_min_conns(self, min_conns): + self._db_min_conns = min_conns + def db_delete_min_conns(self, min_conns): + self._db_min_conns = None + + def __get_db_max_conns(self): + return self._db_max_conns + def __set_db_max_conns(self, max_conns): + self._db_max_conns = max_conns + self.is_dirty = True + db_max_conns = property(__get_db_max_conns, __set_db_max_conns) + def db_add_max_conns(self, max_conns): + self._db_max_conns = max_conns + def db_change_max_conns(self, max_conns): + self._db_max_conns = max_conns + def db_delete_max_conns(self, max_conns): + self._db_max_conns = None + + def getPrimaryKey(self): + return self._db_id + +class DBModule(object): + + vtType = 'module' + + def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, location=None, functions=None, annotations=None, controlParameters=None, portSpecs=None): + self._db_id = id + self._db_cache = cache + self._db_name = name + self._db_namespace = namespace + self._db_package = package + self._db_version = version + self.db_deleted_location = [] + self._db_location = location + self.db_deleted_functions = [] + self.db_functions_id_index = {} + if functions is None: + self._db_functions = [] + else: + self._db_functions = functions + for v in 
self._db_functions: + self.db_functions_id_index[v.db_id] = v + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + self.db_annotations_key_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_annotations_key_index[v.db_key] = v + self.db_deleted_controlParameters = [] + self.db_controlParameters_id_index = {} + self.db_controlParameters_name_index = {} + if controlParameters is None: + self._db_controlParameters = [] + else: + self._db_controlParameters = controlParameters + for v in self._db_controlParameters: + self.db_controlParameters_id_index[v.db_id] = v + self.db_controlParameters_name_index[v.db_name] = v + self.db_deleted_portSpecs = [] + self.db_portSpecs_id_index = {} + self.db_portSpecs_name_index = {} + if portSpecs is None: + self._db_portSpecs = [] + else: + self._db_portSpecs = portSpecs + for v in self._db_portSpecs: + self.db_portSpecs_id_index[v.db_id] = v + self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBModule.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBModule(id=self._db_id, + cache=self._db_cache, + name=self._db_name, + namespace=self._db_namespace, + package=self._db_package, + version=self._db_version) + if self._db_location is not None: + cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap) + if self._db_functions is None: + cp._db_functions = [] + else: + cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + if self._db_controlParameters is None: + cp._db_controlParameters = [] + else: + cp._db_controlParameters = 
[v.do_copy(new_ids, id_scope, id_remap) for v in self._db_controlParameters] + if self._db_portSpecs is None: + cp._db_portSpecs = [] + else: + cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations) + cp.db_controlParameters_id_index = dict((v.db_id, v) for v in cp._db_controlParameters) + cp.db_controlParameters_name_index = dict((v.db_name, v) for v in cp._db_controlParameters) + cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs) + cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBModule() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'cache' in class_dict: + res = class_dict['cache'](old_obj, trans_dict) + new_obj.db_cache = res + elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None: + new_obj.db_cache = old_obj.db_cache + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + 
new_obj.db_name = old_obj.db_name + if 'namespace' in class_dict: + res = class_dict['namespace'](old_obj, trans_dict) + new_obj.db_namespace = res + elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None: + new_obj.db_namespace = old_obj.db_namespace + if 'package' in class_dict: + res = class_dict['package'](old_obj, trans_dict) + new_obj.db_package = res + elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None: + new_obj.db_package = old_obj.db_package + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'location' in class_dict: + res = class_dict['location'](old_obj, trans_dict) + new_obj.db_location = res + elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None: + obj = old_obj.db_location + new_obj.db_add_location(DBLocation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'): + for obj in old_obj.db_deleted_location: + n_obj = DBLocation.update_version(obj, trans_dict) + new_obj.db_deleted_location.append(n_obj) + if 'functions' in class_dict: + res = class_dict['functions'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_function(obj) + elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None: + for obj in old_obj.db_functions: + new_obj.db_add_function(DBFunction.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'): + for obj in old_obj.db_deleted_functions: + n_obj = DBFunction.update_version(obj, trans_dict) + new_obj.db_deleted_functions.append(n_obj) + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not 
None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + if 'controlParameters' in class_dict: + res = class_dict['controlParameters'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_controlParameter(obj) + elif hasattr(old_obj, 'db_controlParameters') and old_obj.db_controlParameters is not None: + for obj in old_obj.db_controlParameters: + new_obj.db_add_controlParameter(DBControlParameter.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_controlParameters') and hasattr(new_obj, 'db_deleted_controlParameters'): + for obj in old_obj.db_deleted_controlParameters: + n_obj = DBControlParameter.update_version(obj, trans_dict) + new_obj.db_deleted_controlParameters.append(n_obj) + if 'portSpecs' in class_dict: + res = class_dict['portSpecs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_portSpec(obj) + elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None: + for obj in old_obj.db_portSpecs: + new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'): + for obj in old_obj.db_deleted_portSpecs: + n_obj = DBPortSpec.update_version(obj, trans_dict) + new_obj.db_deleted_portSpecs.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_location is not None: + children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_location = None + to_del = [] + for child in self.db_functions: + 
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_function(child) + to_del = [] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_controlParameters: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_controlParameter(child) + to_del = [] + for child in self.db_portSpecs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_portSpec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_location) + children.extend(self.db_deleted_functions) + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_controlParameters) + children.extend(self.db_deleted_portSpecs) + if remove: + self.db_deleted_location = [] + self.db_deleted_functions = [] + self.db_deleted_annotations = [] + self.db_deleted_controlParameters = [] + self.db_deleted_portSpecs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_location is not None and self._db_location.has_changes(): + return True + for child in self._db_functions: + if child.has_changes(): + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_controlParameters: + if child.has_changes(): + return True + for child in self._db_portSpecs: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + 
self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_cache(self): + return self._db_cache + def __set_db_cache(self, cache): + self._db_cache = cache + self.is_dirty = True + db_cache = property(__get_db_cache, __set_db_cache) + def db_add_cache(self, cache): + self._db_cache = cache + def db_change_cache(self, cache): + self._db_cache = cache + def db_delete_cache(self, cache): + self._db_cache = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_namespace(self): + return self._db_namespace + def __set_db_namespace(self, namespace): + self._db_namespace = namespace + self.is_dirty = True + db_namespace = property(__get_db_namespace, __set_db_namespace) + def db_add_namespace(self, namespace): + self._db_namespace = namespace + def db_change_namespace(self, namespace): + self._db_namespace = namespace + def db_delete_namespace(self, namespace): + self._db_namespace = None + + def __get_db_package(self): + return self._db_package + def __set_db_package(self, package): + self._db_package = package + self.is_dirty = True + db_package = property(__get_db_package, __set_db_package) + def db_add_package(self, package): + self._db_package = package + def db_change_package(self, package): + self._db_package = package + def db_delete_package(self, package): + self._db_package = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = property(__get_db_version, 
__set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_location(self): + return self._db_location + def __set_db_location(self, location): + self._db_location = location + self.is_dirty = True + db_location = property(__get_db_location, __set_db_location) + def db_add_location(self, location): + self._db_location = location + def db_change_location(self, location): + self._db_location = location + def db_delete_location(self, location): + if not self.is_new: + self.db_deleted_location.append(self._db_location) + self._db_location = None + + def __get_db_functions(self): + return self._db_functions + def __set_db_functions(self, functions): + self._db_functions = functions + self.is_dirty = True + db_functions = property(__get_db_functions, __set_db_functions) + def db_get_functions(self): + return self._db_functions + def db_add_function(self, function): + self.is_dirty = True + self._db_functions.append(function) + self.db_functions_id_index[function.db_id] = function + def db_change_function(self, function): + self.is_dirty = True + found = False + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == function.db_id: + self._db_functions[i] = function + found = True + break + if not found: + self._db_functions.append(function) + self.db_functions_id_index[function.db_id] = function + def db_delete_function(self, function): + self.is_dirty = True + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == function.db_id: + if not self._db_functions[i].is_new: + self.db_deleted_functions.append(self._db_functions[i]) + del self._db_functions[i] + break + del self.db_functions_id_index[function.db_id] + def db_get_function(self, key): + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == key: + return 
self._db_functions[i] + return None + def db_get_function_by_id(self, key): + return self.db_functions_id_index[key] + def db_has_function_with_id(self, key): + return key in self.db_functions_id_index + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + del self.db_annotations_key_index[annotation.db_key] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + def db_get_annotation_by_key(self, key): + return 
self.db_annotations_key_index[key] + def db_has_annotation_with_key(self, key): + return key in self.db_annotations_key_index + + def __get_db_controlParameters(self): + return self._db_controlParameters + def __set_db_controlParameters(self, controlParameters): + self._db_controlParameters = controlParameters + self.is_dirty = True + db_controlParameters = property(__get_db_controlParameters, __set_db_controlParameters) + def db_get_controlParameters(self): + return self._db_controlParameters + def db_add_controlParameter(self, controlParameter): + self.is_dirty = True + self._db_controlParameters.append(controlParameter) + self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter + self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter + def db_change_controlParameter(self, controlParameter): + self.is_dirty = True + found = False + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == controlParameter.db_id: + self._db_controlParameters[i] = controlParameter + found = True + break + if not found: + self._db_controlParameters.append(controlParameter) + self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter + self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter + def db_delete_controlParameter(self, controlParameter): + self.is_dirty = True + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == controlParameter.db_id: + if not self._db_controlParameters[i].is_new: + self.db_deleted_controlParameters.append(self._db_controlParameters[i]) + del self._db_controlParameters[i] + break + del self.db_controlParameters_id_index[controlParameter.db_id] + del self.db_controlParameters_name_index[controlParameter.db_name] + def db_get_controlParameter(self, key): + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == key: + return 
self._db_controlParameters[i] + return None + def db_get_controlParameter_by_id(self, key): + return self.db_controlParameters_id_index[key] + def db_has_controlParameter_with_id(self, key): + return key in self.db_controlParameters_id_index + def db_get_controlParameter_by_name(self, key): + return self.db_controlParameters_name_index[key] + def db_has_controlParameter_with_name(self, key): + return key in self.db_controlParameters_name_index + + def __get_db_portSpecs(self): + return self._db_portSpecs + def __set_db_portSpecs(self, portSpecs): + self._db_portSpecs = portSpecs + self.is_dirty = True + db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs) + def db_get_portSpecs(self): + return self._db_portSpecs + def db_add_portSpec(self, portSpec): + self.is_dirty = True + self._db_portSpecs.append(portSpec) + self.db_portSpecs_id_index[portSpec.db_id] = portSpec + self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec + def db_change_portSpec(self, portSpec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_portSpecs)): + if self._db_portSpecs[i].db_id == portSpec.db_id: + self._db_portSpecs[i] = portSpec + found = True + break + if not found: + self._db_portSpecs.append(portSpec) + self.db_portSpecs_id_index[portSpec.db_id] = portSpec + self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec + def db_delete_portSpec(self, portSpec): + self.is_dirty = True + for i in xrange(len(self._db_portSpecs)): + if self._db_portSpecs[i].db_id == portSpec.db_id: + if not self._db_portSpecs[i].is_new: + self.db_deleted_portSpecs.append(self._db_portSpecs[i]) + del self._db_portSpecs[i] + break + del self.db_portSpecs_id_index[portSpec.db_id] + del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] + def db_get_portSpec(self, key): + for i in xrange(len(self._db_portSpecs)): + if self._db_portSpecs[i].db_id == key: + return self._db_portSpecs[i] + return None + def 
db_get_portSpec_by_id(self, key): + return self.db_portSpecs_id_index[key] + def db_has_portSpec_with_id(self, key): + return key in self.db_portSpecs_id_index + def db_get_portSpec_by_name(self, key): + return self.db_portSpecs_name_index[key] + def db_has_portSpec_with_name(self, key): + return key in self.db_portSpecs_name_index + + def getPrimaryKey(self): + return self._db_id + +class DBModuleDescriptor(object): + + vtType = 'module_descriptor' + + def __init__(self, id=None, name=None, package=None, namespace=None, package_version=None, version=None, base_descriptor_id=None, portSpecs=None): + self._db_id = id + self._db_name = name + self._db_package = package + self._db_namespace = namespace + self._db_package_version = package_version + self._db_version = version + self._db_base_descriptor_id = base_descriptor_id + self.db_deleted_portSpecs = [] + self.db_portSpecs_id_index = {} + self.db_portSpecs_name_index = {} + if portSpecs is None: + self._db_portSpecs = [] + else: + self._db_portSpecs = portSpecs + for v in self._db_portSpecs: + self.db_portSpecs_id_index[v.db_id] = v + self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBModuleDescriptor.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBModuleDescriptor(id=self._db_id, + name=self._db_name, + package=self._db_package, + namespace=self._db_namespace, + package_version=self._db_package_version, + version=self._db_version, + base_descriptor_id=self._db_base_descriptor_id) + if self._db_portSpecs is None: + cp._db_portSpecs = [] + else: + cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if 
hasattr(self, 'db_base_descriptor_id') and ('module_descriptor', self._db_base_descriptor_id) in id_remap: + cp._db_base_descriptor_id = id_remap[('module_descriptor', self._db_base_descriptor_id)] + + # recreate indices and set flags + cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs) + cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBModuleDescriptor() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'package' in class_dict: + res = class_dict['package'](old_obj, trans_dict) + new_obj.db_package = res + elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None: + new_obj.db_package = old_obj.db_package + if 'namespace' in class_dict: + res = class_dict['namespace'](old_obj, trans_dict) + new_obj.db_namespace = res + elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None: + new_obj.db_namespace = old_obj.db_namespace + if 'package_version' in class_dict: + res = class_dict['package_version'](old_obj, trans_dict) + new_obj.db_package_version = res + elif hasattr(old_obj, 'db_package_version') and old_obj.db_package_version is not None: + new_obj.db_package_version = old_obj.db_package_version + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 
'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'base_descriptor_id' in class_dict: + res = class_dict['base_descriptor_id'](old_obj, trans_dict) + new_obj.db_base_descriptor_id = res + elif hasattr(old_obj, 'db_base_descriptor_id') and old_obj.db_base_descriptor_id is not None: + new_obj.db_base_descriptor_id = old_obj.db_base_descriptor_id + if 'portSpecs' in class_dict: + res = class_dict['portSpecs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_portSpec(obj) + elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None: + for obj in old_obj.db_portSpecs: + new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'): + for obj in old_obj.db_deleted_portSpecs: + n_obj = DBPortSpec.update_version(obj, trans_dict) + new_obj.db_deleted_portSpecs.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_portSpecs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_portSpec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_portSpecs) + if remove: + self.db_deleted_portSpecs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_portSpecs: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + 
def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_package(self): + return self._db_package + def __set_db_package(self, package): + self._db_package = package + self.is_dirty = True + db_package = property(__get_db_package, __set_db_package) + def db_add_package(self, package): + self._db_package = package + def db_change_package(self, package): + self._db_package = package + def db_delete_package(self, package): + self._db_package = None + + def __get_db_namespace(self): + return self._db_namespace + def __set_db_namespace(self, namespace): + self._db_namespace = namespace + self.is_dirty = True + db_namespace = property(__get_db_namespace, __set_db_namespace) + def db_add_namespace(self, namespace): + self._db_namespace = namespace + def db_change_namespace(self, namespace): + self._db_namespace = namespace + def db_delete_namespace(self, namespace): + self._db_namespace = None + + def __get_db_package_version(self): + return self._db_package_version + def __set_db_package_version(self, package_version): + self._db_package_version = package_version + self.is_dirty = True + db_package_version = property(__get_db_package_version, __set_db_package_version) + def db_add_package_version(self, package_version): + self._db_package_version = package_version + def db_change_package_version(self, package_version): + self._db_package_version = package_version + def db_delete_package_version(self, package_version): + self._db_package_version = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = 
property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_base_descriptor_id(self): + return self._db_base_descriptor_id + def __set_db_base_descriptor_id(self, base_descriptor_id): + self._db_base_descriptor_id = base_descriptor_id + self.is_dirty = True + db_base_descriptor_id = property(__get_db_base_descriptor_id, __set_db_base_descriptor_id) + def db_add_base_descriptor_id(self, base_descriptor_id): + self._db_base_descriptor_id = base_descriptor_id + def db_change_base_descriptor_id(self, base_descriptor_id): + self._db_base_descriptor_id = base_descriptor_id + def db_delete_base_descriptor_id(self, base_descriptor_id): + self._db_base_descriptor_id = None + + def __get_db_portSpecs(self): + return self._db_portSpecs + def __set_db_portSpecs(self, portSpecs): + self._db_portSpecs = portSpecs + self.is_dirty = True + db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs) + def db_get_portSpecs(self): + return self._db_portSpecs + def db_add_portSpec(self, portSpec): + self.is_dirty = True + self._db_portSpecs.append(portSpec) + self.db_portSpecs_id_index[portSpec.db_id] = portSpec + self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec + def db_change_portSpec(self, portSpec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_portSpecs)): + if self._db_portSpecs[i].db_id == portSpec.db_id: + self._db_portSpecs[i] = portSpec + found = True + break + if not found: + self._db_portSpecs.append(portSpec) + self.db_portSpecs_id_index[portSpec.db_id] = portSpec + self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec + def db_delete_portSpec(self, portSpec): + self.is_dirty = True + for i in xrange(len(self._db_portSpecs)): + if self._db_portSpecs[i].db_id == portSpec.db_id: + if not 
self._db_portSpecs[i].is_new: + self.db_deleted_portSpecs.append(self._db_portSpecs[i]) + del self._db_portSpecs[i] + break + del self.db_portSpecs_id_index[portSpec.db_id] + del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] + def db_get_portSpec(self, key): + for i in xrange(len(self._db_portSpecs)): + if self._db_portSpecs[i].db_id == key: + return self._db_portSpecs[i] + return None + def db_get_portSpec_by_id(self, key): + return self.db_portSpecs_id_index[key] + def db_has_portSpec_with_id(self, key): + return key in self.db_portSpecs_id_index + def db_get_portSpec_by_name(self, key): + return self.db_portSpecs_name_index[key] + def db_has_portSpec_with_name(self, key): + return key in self.db_portSpecs_name_index + + def getPrimaryKey(self): + return self._db_id + +class DBTag(object): + + vtType = 'tag' + + def __init__(self, id=None, name=None): + self._db_id = id + self._db_name = name + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBTag.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBTag(id=self._db_id, + name=self._db_name) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_id') and ('action', self._db_id) in id_remap: + cp._db_id = id_remap[('action', self._db_id)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBTag() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and 
old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmRole(object): + + vtType = 'opm_role' + + def __init__(self, value=None): + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmRole.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmRole(value=self._db_value) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new 
= self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmRole() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + + +class DBProvDocument(object): + + vtType = 'prov_document' + + def __init__(self, prov_entitys=None, prov_activitys=None, prov_agents=None, vt_connections=None, prov_usages=None, prov_generations=None, prov_associations=None): + self.db_deleted_prov_entitys = [] + self.db_prov_entitys_id_index = {} + if prov_entitys is None: + self._db_prov_entitys = [] + else: + self._db_prov_entitys = prov_entitys + for v in self._db_prov_entitys: + self.db_prov_entitys_id_index[v.db_id] = v + self.db_deleted_prov_activitys = [] + self.db_prov_activitys_id_index = {} + if prov_activitys is None: + self._db_prov_activitys = [] + else: + self._db_prov_activitys = prov_activitys + for v in self._db_prov_activitys: + self.db_prov_activitys_id_index[v.db_id] = v + self.db_deleted_prov_agents = 
[] + self.db_prov_agents_id_index = {} + if prov_agents is None: + self._db_prov_agents = [] + else: + self._db_prov_agents = prov_agents + for v in self._db_prov_agents: + self.db_prov_agents_id_index[v.db_id] = v + self.db_deleted_vt_connections = [] + self.db_vt_connections_id_index = {} + if vt_connections is None: + self._db_vt_connections = [] + else: + self._db_vt_connections = vt_connections + for v in self._db_vt_connections: + self.db_vt_connections_id_index[v.db_id] = v + self.db_deleted_prov_usages = [] + if prov_usages is None: + self._db_prov_usages = [] + else: + self._db_prov_usages = prov_usages + self.db_deleted_prov_generations = [] + if prov_generations is None: + self._db_prov_generations = [] + else: + self._db_prov_generations = prov_generations + self.db_deleted_prov_associations = [] + if prov_associations is None: + self._db_prov_associations = [] + else: + self._db_prov_associations = prov_associations + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBProvDocument.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBProvDocument() + if self._db_prov_entitys is None: + cp._db_prov_entitys = [] + else: + cp._db_prov_entitys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_entitys] + if self._db_prov_activitys is None: + cp._db_prov_activitys = [] + else: + cp._db_prov_activitys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_activitys] + if self._db_prov_agents is None: + cp._db_prov_agents = [] + else: + cp._db_prov_agents = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_agents] + if self._db_vt_connections is None: + cp._db_vt_connections = [] + else: + cp._db_vt_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_vt_connections] + if self._db_prov_usages is None: + cp._db_prov_usages = [] + else: + cp._db_prov_usages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_usages] + if 
self._db_prov_generations is None: + cp._db_prov_generations = [] + else: + cp._db_prov_generations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_generations] + if self._db_prov_associations is None: + cp._db_prov_associations = [] + else: + cp._db_prov_associations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_associations] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_prov_entitys_id_index = dict((v.db_id, v) for v in cp._db_prov_entitys) + cp.db_prov_activitys_id_index = dict((v.db_id, v) for v in cp._db_prov_activitys) + cp.db_prov_agents_id_index = dict((v.db_id, v) for v in cp._db_prov_agents) + cp.db_vt_connections_id_index = dict((v.db_id, v) for v in cp._db_vt_connections) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBProvDocument() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_entitys' in class_dict: + res = class_dict['prov_entitys'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_prov_entity(obj) + elif hasattr(old_obj, 'db_prov_entitys') and old_obj.db_prov_entitys is not None: + for obj in old_obj.db_prov_entitys: + new_obj.db_add_prov_entity(DBProvEntity.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_entitys') and hasattr(new_obj, 'db_deleted_prov_entitys'): + for obj in old_obj.db_deleted_prov_entitys: + n_obj = DBProvEntity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_entitys.append(n_obj) + if 'prov_activitys' in class_dict: + res = class_dict['prov_activitys'](old_obj, trans_dict) + for obj 
in res: + new_obj.db_add_prov_activity(obj) + elif hasattr(old_obj, 'db_prov_activitys') and old_obj.db_prov_activitys is not None: + for obj in old_obj.db_prov_activitys: + new_obj.db_add_prov_activity(DBProvActivity.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_activitys') and hasattr(new_obj, 'db_deleted_prov_activitys'): + for obj in old_obj.db_deleted_prov_activitys: + n_obj = DBProvActivity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_activitys.append(n_obj) + if 'prov_agents' in class_dict: + res = class_dict['prov_agents'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_prov_agent(obj) + elif hasattr(old_obj, 'db_prov_agents') and old_obj.db_prov_agents is not None: + for obj in old_obj.db_prov_agents: + new_obj.db_add_prov_agent(DBProvAgent.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_agents') and hasattr(new_obj, 'db_deleted_prov_agents'): + for obj in old_obj.db_deleted_prov_agents: + n_obj = DBProvAgent.update_version(obj, trans_dict) + new_obj.db_deleted_prov_agents.append(n_obj) + if 'vt_connections' in class_dict: + res = class_dict['vt_connections'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_vt_connection(obj) + elif hasattr(old_obj, 'db_vt_connections') and old_obj.db_vt_connections is not None: + for obj in old_obj.db_vt_connections: + new_obj.db_add_vt_connection(DBVtConnection.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_vt_connections') and hasattr(new_obj, 'db_deleted_vt_connections'): + for obj in old_obj.db_deleted_vt_connections: + n_obj = DBVtConnection.update_version(obj, trans_dict) + new_obj.db_deleted_vt_connections.append(n_obj) + if 'prov_usages' in class_dict: + res = class_dict['prov_usages'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_prov_usage(obj) + elif hasattr(old_obj, 'db_prov_usages') and old_obj.db_prov_usages is not None: + for obj in old_obj.db_prov_usages: + 
new_obj.db_add_prov_usage(DBProvUsage.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_usages') and hasattr(new_obj, 'db_deleted_prov_usages'): + for obj in old_obj.db_deleted_prov_usages: + n_obj = DBProvUsage.update_version(obj, trans_dict) + new_obj.db_deleted_prov_usages.append(n_obj) + if 'prov_generations' in class_dict: + res = class_dict['prov_generations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_prov_generation(obj) + elif hasattr(old_obj, 'db_prov_generations') and old_obj.db_prov_generations is not None: + for obj in old_obj.db_prov_generations: + new_obj.db_add_prov_generation(DBProvGeneration.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_generations') and hasattr(new_obj, 'db_deleted_prov_generations'): + for obj in old_obj.db_deleted_prov_generations: + n_obj = DBProvGeneration.update_version(obj, trans_dict) + new_obj.db_deleted_prov_generations.append(n_obj) + if 'prov_associations' in class_dict: + res = class_dict['prov_associations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_prov_association(obj) + elif hasattr(old_obj, 'db_prov_associations') and old_obj.db_prov_associations is not None: + for obj in old_obj.db_prov_associations: + new_obj.db_add_prov_association(DBProvAssociation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_associations') and hasattr(new_obj, 'db_deleted_prov_associations'): + for obj in old_obj.db_deleted_prov_associations: + n_obj = DBProvAssociation.update_version(obj, trans_dict) + new_obj.db_deleted_prov_associations.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_prov_entitys: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + 
self.db_delete_prov_entity(child) + to_del = [] + for child in self.db_prov_activitys: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_prov_activity(child) + to_del = [] + for child in self.db_prov_agents: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_prov_agent(child) + to_del = [] + for child in self.db_vt_connections: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_vt_connection(child) + to_del = [] + for child in self.db_prov_usages: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_prov_usage(child) + to_del = [] + for child in self.db_prov_generations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_prov_generation(child) + to_del = [] + for child in self.db_prov_associations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_prov_association(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_prov_entitys) + children.extend(self.db_deleted_prov_activitys) + children.extend(self.db_deleted_prov_agents) + children.extend(self.db_deleted_vt_connections) + children.extend(self.db_deleted_prov_usages) + children.extend(self.db_deleted_prov_generations) + children.extend(self.db_deleted_prov_associations) + if remove: + self.db_deleted_prov_entitys = [] + self.db_deleted_prov_activitys = [] 
+ self.db_deleted_prov_agents = [] + self.db_deleted_vt_connections = [] + self.db_deleted_prov_usages = [] + self.db_deleted_prov_generations = [] + self.db_deleted_prov_associations = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_prov_entitys: + if child.has_changes(): + return True + for child in self._db_prov_activitys: + if child.has_changes(): + return True + for child in self._db_prov_agents: + if child.has_changes(): + return True + for child in self._db_vt_connections: + if child.has_changes(): + return True + for child in self._db_prov_usages: + if child.has_changes(): + return True + for child in self._db_prov_generations: + if child.has_changes(): + return True + for child in self._db_prov_associations: + if child.has_changes(): + return True + return False + def __get_db_prov_entitys(self): + return self._db_prov_entitys + def __set_db_prov_entitys(self, prov_entitys): + self._db_prov_entitys = prov_entitys + self.is_dirty = True + db_prov_entitys = property(__get_db_prov_entitys, __set_db_prov_entitys) + def db_get_prov_entitys(self): + return self._db_prov_entitys + def db_add_prov_entity(self, prov_entity): + self.is_dirty = True + self._db_prov_entitys.append(prov_entity) + self.db_prov_entitys_id_index[prov_entity.db_id] = prov_entity + def db_change_prov_entity(self, prov_entity): + self.is_dirty = True + found = False + for i in xrange(len(self._db_prov_entitys)): + if self._db_prov_entitys[i].db_id == prov_entity.db_id: + self._db_prov_entitys[i] = prov_entity + found = True + break + if not found: + self._db_prov_entitys.append(prov_entity) + self.db_prov_entitys_id_index[prov_entity.db_id] = prov_entity + def db_delete_prov_entity(self, prov_entity): + self.is_dirty = True + for i in xrange(len(self._db_prov_entitys)): + if self._db_prov_entitys[i].db_id == prov_entity.db_id: + if not self._db_prov_entitys[i].is_new: + self.db_deleted_prov_entitys.append(self._db_prov_entitys[i]) + del 
self._db_prov_entitys[i] + break + del self.db_prov_entitys_id_index[prov_entity.db_id] + def db_get_prov_entity(self, key): + for i in xrange(len(self._db_prov_entitys)): + if self._db_prov_entitys[i].db_id == key: + return self._db_prov_entitys[i] + return None + def db_get_prov_entity_by_id(self, key): + return self.db_prov_entitys_id_index[key] + def db_has_prov_entity_with_id(self, key): + return key in self.db_prov_entitys_id_index + + def __get_db_prov_activitys(self): + return self._db_prov_activitys + def __set_db_prov_activitys(self, prov_activitys): + self._db_prov_activitys = prov_activitys + self.is_dirty = True + db_prov_activitys = property(__get_db_prov_activitys, __set_db_prov_activitys) + def db_get_prov_activitys(self): + return self._db_prov_activitys + def db_add_prov_activity(self, prov_activity): + self.is_dirty = True + self._db_prov_activitys.append(prov_activity) + self.db_prov_activitys_id_index[prov_activity.db_id] = prov_activity + def db_change_prov_activity(self, prov_activity): + self.is_dirty = True + found = False + for i in xrange(len(self._db_prov_activitys)): + if self._db_prov_activitys[i].db_id == prov_activity.db_id: + self._db_prov_activitys[i] = prov_activity + found = True + break + if not found: + self._db_prov_activitys.append(prov_activity) + self.db_prov_activitys_id_index[prov_activity.db_id] = prov_activity + def db_delete_prov_activity(self, prov_activity): + self.is_dirty = True + for i in xrange(len(self._db_prov_activitys)): + if self._db_prov_activitys[i].db_id == prov_activity.db_id: + if not self._db_prov_activitys[i].is_new: + self.db_deleted_prov_activitys.append(self._db_prov_activitys[i]) + del self._db_prov_activitys[i] + break + del self.db_prov_activitys_id_index[prov_activity.db_id] + def db_get_prov_activity(self, key): + for i in xrange(len(self._db_prov_activitys)): + if self._db_prov_activitys[i].db_id == key: + return self._db_prov_activitys[i] + return None + def db_get_prov_activity_by_id(self, 
key): + return self.db_prov_activitys_id_index[key] + def db_has_prov_activity_with_id(self, key): + return key in self.db_prov_activitys_id_index + + def __get_db_prov_agents(self): + return self._db_prov_agents + def __set_db_prov_agents(self, prov_agents): + self._db_prov_agents = prov_agents + self.is_dirty = True + db_prov_agents = property(__get_db_prov_agents, __set_db_prov_agents) + def db_get_prov_agents(self): + return self._db_prov_agents + def db_add_prov_agent(self, prov_agent): + self.is_dirty = True + self._db_prov_agents.append(prov_agent) + self.db_prov_agents_id_index[prov_agent.db_id] = prov_agent + def db_change_prov_agent(self, prov_agent): + self.is_dirty = True + found = False + for i in xrange(len(self._db_prov_agents)): + if self._db_prov_agents[i].db_id == prov_agent.db_id: + self._db_prov_agents[i] = prov_agent + found = True + break + if not found: + self._db_prov_agents.append(prov_agent) + self.db_prov_agents_id_index[prov_agent.db_id] = prov_agent + def db_delete_prov_agent(self, prov_agent): + self.is_dirty = True + for i in xrange(len(self._db_prov_agents)): + if self._db_prov_agents[i].db_id == prov_agent.db_id: + if not self._db_prov_agents[i].is_new: + self.db_deleted_prov_agents.append(self._db_prov_agents[i]) + del self._db_prov_agents[i] + break + del self.db_prov_agents_id_index[prov_agent.db_id] + def db_get_prov_agent(self, key): + for i in xrange(len(self._db_prov_agents)): + if self._db_prov_agents[i].db_id == key: + return self._db_prov_agents[i] + return None + def db_get_prov_agent_by_id(self, key): + return self.db_prov_agents_id_index[key] + def db_has_prov_agent_with_id(self, key): + return key in self.db_prov_agents_id_index + + def __get_db_vt_connections(self): + return self._db_vt_connections + def __set_db_vt_connections(self, vt_connections): + self._db_vt_connections = vt_connections + self.is_dirty = True + db_vt_connections = property(__get_db_vt_connections, __set_db_vt_connections) + def 
db_get_vt_connections(self): + return self._db_vt_connections + def db_add_vt_connection(self, vt_connection): + self.is_dirty = True + self._db_vt_connections.append(vt_connection) + self.db_vt_connections_id_index[vt_connection.db_id] = vt_connection + def db_change_vt_connection(self, vt_connection): + self.is_dirty = True + found = False + for i in xrange(len(self._db_vt_connections)): + if self._db_vt_connections[i].db_id == vt_connection.db_id: + self._db_vt_connections[i] = vt_connection + found = True + break + if not found: + self._db_vt_connections.append(vt_connection) + self.db_vt_connections_id_index[vt_connection.db_id] = vt_connection + def db_delete_vt_connection(self, vt_connection): + self.is_dirty = True + for i in xrange(len(self._db_vt_connections)): + if self._db_vt_connections[i].db_id == vt_connection.db_id: + if not self._db_vt_connections[i].is_new: + self.db_deleted_vt_connections.append(self._db_vt_connections[i]) + del self._db_vt_connections[i] + break + del self.db_vt_connections_id_index[vt_connection.db_id] + def db_get_vt_connection(self, key): + for i in xrange(len(self._db_vt_connections)): + if self._db_vt_connections[i].db_id == key: + return self._db_vt_connections[i] + return None + def db_get_vt_connection_by_id(self, key): + return self.db_vt_connections_id_index[key] + def db_has_vt_connection_with_id(self, key): + return key in self.db_vt_connections_id_index + + def __get_db_prov_usages(self): + return self._db_prov_usages + def __set_db_prov_usages(self, prov_usages): + self._db_prov_usages = prov_usages + self.is_dirty = True + db_prov_usages = property(__get_db_prov_usages, __set_db_prov_usages) + def db_get_prov_usages(self): + return self._db_prov_usages + def db_add_prov_usage(self, prov_usage): + self.is_dirty = True + self._db_prov_usages.append(prov_usage) + def db_change_prov_usage(self, prov_usage): + self.is_dirty = True + self._db_prov_usages.append(prov_usage) + def db_delete_prov_usage(self, prov_usage): + 
self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_prov_usage(self, key): + return None + + def __get_db_prov_generations(self): + return self._db_prov_generations + def __set_db_prov_generations(self, prov_generations): + self._db_prov_generations = prov_generations + self.is_dirty = True + db_prov_generations = property(__get_db_prov_generations, __set_db_prov_generations) + def db_get_prov_generations(self): + return self._db_prov_generations + def db_add_prov_generation(self, prov_generation): + self.is_dirty = True + self._db_prov_generations.append(prov_generation) + def db_change_prov_generation(self, prov_generation): + self.is_dirty = True + self._db_prov_generations.append(prov_generation) + def db_delete_prov_generation(self, prov_generation): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_prov_generation(self, key): + return None + + def __get_db_prov_associations(self): + return self._db_prov_associations + def __set_db_prov_associations(self, prov_associations): + self._db_prov_associations = prov_associations + self.is_dirty = True + db_prov_associations = property(__get_db_prov_associations, __set_db_prov_associations) + def db_get_prov_associations(self): + return self._db_prov_associations + def db_add_prov_association(self, prov_association): + self.is_dirty = True + self._db_prov_associations.append(prov_association) + def db_change_prov_association(self, prov_association): + self.is_dirty = True + self._db_prov_associations.append(prov_association) + def db_delete_prov_association(self, prov_association): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_prov_association(self, key): + return None + + + +class DBOpmAccount(object): + + vtType = 'opm_account' + + def __init__(self, id=None, value=None): + self._db_id = id + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return 
DBOpmAccount.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmAccount(id=self._db_id, + value=self._db_value) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmAccount() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = 
property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmProcesses(object): + + vtType = 'opm_processes' + + def __init__(self, processs=None): + self.db_deleted_processs = [] + self.db_processs_id_index = {} + if processs is None: + self._db_processs = [] + else: + self._db_processs = processs + for v in self._db_processs: + self.db_processs_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmProcesses.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmProcesses() + if self._db_processs is None: + cp._db_processs = [] + else: + cp._db_processs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_processs] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_processs_id_index = dict((v.db_id, v) for v in cp._db_processs) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmProcesses() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'processs' in class_dict: + res = class_dict['processs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_process(obj) + elif hasattr(old_obj, 'db_processs') and old_obj.db_processs is not None: + for obj in old_obj.db_processs: + new_obj.db_add_process(DBOpmProcess.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_processs') and 
hasattr(new_obj, 'db_deleted_processs'): + for obj in old_obj.db_deleted_processs: + n_obj = DBOpmProcess.update_version(obj, trans_dict) + new_obj.db_deleted_processs.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_processs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_process(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_processs) + if remove: + self.db_deleted_processs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_processs: + if child.has_changes(): + return True + return False + def __get_db_processs(self): + return self._db_processs + def __set_db_processs(self, processs): + self._db_processs = processs + self.is_dirty = True + db_processs = property(__get_db_processs, __set_db_processs) + def db_get_processs(self): + return self._db_processs + def db_add_process(self, process): + self.is_dirty = True + self._db_processs.append(process) + self.db_processs_id_index[process.db_id] = process + def db_change_process(self, process): + self.is_dirty = True + found = False + for i in xrange(len(self._db_processs)): + if self._db_processs[i].db_id == process.db_id: + self._db_processs[i] = process + found = True + break + if not found: + self._db_processs.append(process) + self.db_processs_id_index[process.db_id] = process + def db_delete_process(self, process): + self.is_dirty = True + for i in xrange(len(self._db_processs)): + if self._db_processs[i].db_id == process.db_id: + if not self._db_processs[i].is_new: + self.db_deleted_processs.append(self._db_processs[i]) + del 
self._db_processs[i] + break + del self.db_processs_id_index[process.db_id] + def db_get_process(self, key): + for i in xrange(len(self._db_processs)): + if self._db_processs[i].db_id == key: + return self._db_processs[i] + return None + def db_get_process_by_id(self, key): + return self.db_processs_id_index[key] + def db_has_process_with_id(self, key): + return key in self.db_processs_id_index + + + +class DBRefProvActivity(object): + + vtType = 'ref_prov_activity' + + def __init__(self, prov_ref=None): + self._db_prov_ref = prov_ref + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBRefProvActivity.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBRefProvActivity(prov_ref=self._db_prov_ref) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_prov_ref') and ('prov_activity', self._db_prov_ref) in id_remap: + cp._db_prov_ref = id_remap[('prov_activity', self._db_prov_ref)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBRefProvActivity() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_ref' in class_dict: + res = class_dict['prov_ref'](old_obj, trans_dict) + new_obj.db_prov_ref = res + elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None: + new_obj.db_prov_ref = old_obj.db_prov_ref + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def 
db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_prov_ref(self): + return self._db_prov_ref + def __set_db_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + self.is_dirty = True + db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref) + def db_add_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + def db_change_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + def db_delete_prov_ref(self, prov_ref): + self._db_prov_ref = None + + + +class DBOpmAccountId(object): + + vtType = 'opm_account_id' + + def __init__(self, id=None): + self._db_id = id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmAccountId.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmAccountId(id=self._db_id) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_id') and ('opm_account', self._db_id) in id_remap: + cp._db_id = id_remap[('opm_account', self._db_id)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmAccountId() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, 
for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + + +class DBPort(object): + + vtType = 'port' + + def __init__(self, id=None, type=None, moduleId=None, moduleName=None, name=None, signature=None): + self._db_id = id + self._db_type = type + self._db_moduleId = moduleId + self._db_moduleName = moduleName + self._db_name = name + self._db_signature = signature + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBPort.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBPort(id=self._db_id, + type=self._db_type, + moduleId=self._db_moduleId, + moduleName=self._db_moduleName, + name=self._db_name, + signature=self._db_signature) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_moduleId') and ('module', self._db_moduleId) in id_remap: + cp._db_moduleId = id_remap[('module', self._db_moduleId)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBPort() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + 
new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'type' in class_dict: + res = class_dict['type'](old_obj, trans_dict) + new_obj.db_type = res + elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None: + new_obj.db_type = old_obj.db_type + if 'moduleId' in class_dict: + res = class_dict['moduleId'](old_obj, trans_dict) + new_obj.db_moduleId = res + elif hasattr(old_obj, 'db_moduleId') and old_obj.db_moduleId is not None: + new_obj.db_moduleId = old_obj.db_moduleId + if 'moduleName' in class_dict: + res = class_dict['moduleName'](old_obj, trans_dict) + new_obj.db_moduleName = res + elif hasattr(old_obj, 'db_moduleName') and old_obj.db_moduleName is not None: + new_obj.db_moduleName = old_obj.db_moduleName + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'signature' in class_dict: + res = class_dict['signature'](old_obj, trans_dict) + new_obj.db_signature = res + elif hasattr(old_obj, 'db_signature') and old_obj.db_signature is not None: + new_obj.db_signature = old_obj.db_signature + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_type(self): + return self._db_type + def __set_db_type(self, type): + 
self._db_type = type + self.is_dirty = True + db_type = property(__get_db_type, __set_db_type) + def db_add_type(self, type): + self._db_type = type + def db_change_type(self, type): + self._db_type = type + def db_delete_type(self, type): + self._db_type = None + + def __get_db_moduleId(self): + return self._db_moduleId + def __set_db_moduleId(self, moduleId): + self._db_moduleId = moduleId + self.is_dirty = True + db_moduleId = property(__get_db_moduleId, __set_db_moduleId) + def db_add_moduleId(self, moduleId): + self._db_moduleId = moduleId + def db_change_moduleId(self, moduleId): + self._db_moduleId = moduleId + def db_delete_moduleId(self, moduleId): + self._db_moduleId = None + + def __get_db_moduleName(self): + return self._db_moduleName + def __set_db_moduleName(self, moduleName): + self._db_moduleName = moduleName + self.is_dirty = True + db_moduleName = property(__get_db_moduleName, __set_db_moduleName) + def db_add_moduleName(self, moduleName): + self._db_moduleName = moduleName + def db_change_moduleName(self, moduleName): + self._db_moduleName = moduleName + def db_delete_moduleName(self, moduleName): + self._db_moduleName = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_signature(self): + return self._db_signature + def __set_db_signature(self, signature): + self._db_signature = signature + self.is_dirty = True + db_signature = property(__get_db_signature, __set_db_signature) + def db_add_signature(self, signature): + self._db_signature = signature + def db_change_signature(self, signature): + self._db_signature = signature + def db_delete_signature(self, signature): + self._db_signature = None + + def getPrimaryKey(self): + 
return self._db_id + +class DBRefProvPlan(object): + + vtType = 'ref_prov_plan' + + def __init__(self, prov_ref=None): + self._db_prov_ref = prov_ref + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBRefProvPlan.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBRefProvPlan(prov_ref=self._db_prov_ref) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_prov_ref') and ('prov_entity', self._db_prov_ref) in id_remap: + cp._db_prov_ref = id_remap[('prov_entity', self._db_prov_ref)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBRefProvPlan() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_ref' in class_dict: + res = class_dict['prov_ref'](old_obj, trans_dict) + new_obj.db_prov_ref = res + elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None: + new_obj.db_prov_ref = old_obj.db_prov_ref + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_prov_ref(self): + return self._db_prov_ref + def __set_db_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + self.is_dirty = True + db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref) + def db_add_prov_ref(self, prov_ref): + 
self._db_prov_ref = prov_ref + def db_change_prov_ref(self, prov_ref): + self._db_prov_ref = prov_ref + def db_delete_prov_ref(self, prov_ref): + self._db_prov_ref = None + + + +class DBOpmArtifact(object): + + vtType = 'opm_artifact' + + def __init__(self, id=None, value=None, accounts=None): + self._db_id = id + self.db_deleted_value = [] + self._db_value = value + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmArtifact.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmArtifact(id=self._db_id) + if self._db_value is not None: + cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmArtifact() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + obj = old_obj.db_value + 
new_obj.db_add_value(DBOpmArtifactValue.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'): + for obj in old_obj.db_deleted_value: + n_obj = DBOpmArtifactValue.update_version(obj, trans_dict) + new_obj.db_deleted_value.append(n_obj) + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_value is not None: + children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_value = None + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_value) + children.extend(self.db_deleted_accounts) + if remove: + self.db_deleted_value = [] + self.db_deleted_accounts = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_value is not None and self._db_value.has_changes(): + return True + for child in self._db_accounts: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def 
__set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + if not self.is_new: + self.db_deleted_value.append(self._db_value) + self._db_value = None + + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def getPrimaryKey(self): + return self._db_id + +class DBGroup(object): + + vtType = 'group' + + def __init__(self, id=None, workflow=None, cache=None, name=None, namespace=None, package=None, version=None, location=None, functions=None, annotations=None, controlParameters=None): + self._db_id = id + self.db_deleted_workflow = [] + self._db_workflow = workflow + self._db_cache = cache + self._db_name = name + self._db_namespace = namespace + self._db_package = package + self._db_version = version + self.db_deleted_location = [] + self._db_location = location + self.db_deleted_functions = [] + self.db_functions_id_index = {} + if functions is None: + 
self._db_functions = [] + else: + self._db_functions = functions + for v in self._db_functions: + self.db_functions_id_index[v.db_id] = v + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + self.db_annotations_key_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_annotations_key_index[v.db_key] = v + self.db_deleted_controlParameters = [] + self.db_controlParameters_id_index = {} + self.db_controlParameters_name_index = {} + if controlParameters is None: + self._db_controlParameters = [] + else: + self._db_controlParameters = controlParameters + for v in self._db_controlParameters: + self.db_controlParameters_id_index[v.db_id] = v + self.db_controlParameters_name_index[v.db_name] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBGroup.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBGroup(id=self._db_id, + cache=self._db_cache, + name=self._db_name, + namespace=self._db_namespace, + package=self._db_package, + version=self._db_version) + if self._db_workflow is not None: + cp._db_workflow = self._db_workflow.do_copy() + if self._db_location is not None: + cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap) + if self._db_functions is None: + cp._db_functions = [] + else: + cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + if self._db_controlParameters is None: + cp._db_controlParameters = [] + else: + cp._db_controlParameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_controlParameters] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in 
id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations) + cp.db_controlParameters_id_index = dict((v.db_id, v) for v in cp._db_controlParameters) + cp.db_controlParameters_name_index = dict((v.db_name, v) for v in cp._db_controlParameters) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBGroup() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'workflow' in class_dict: + res = class_dict['workflow'](old_obj, trans_dict) + new_obj.db_workflow = res + elif hasattr(old_obj, 'db_workflow') and old_obj.db_workflow is not None: + obj = old_obj.db_workflow + new_obj.db_add_workflow(DBWorkflow.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_workflow') and hasattr(new_obj, 'db_deleted_workflow'): + for obj in old_obj.db_deleted_workflow: + n_obj = DBWorkflow.update_version(obj, trans_dict) + new_obj.db_deleted_workflow.append(n_obj) + if 'cache' in class_dict: + res = class_dict['cache'](old_obj, trans_dict) + new_obj.db_cache = res + elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None: + new_obj.db_cache = old_obj.db_cache + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name 
is not None: + new_obj.db_name = old_obj.db_name + if 'namespace' in class_dict: + res = class_dict['namespace'](old_obj, trans_dict) + new_obj.db_namespace = res + elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None: + new_obj.db_namespace = old_obj.db_namespace + if 'package' in class_dict: + res = class_dict['package'](old_obj, trans_dict) + new_obj.db_package = res + elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None: + new_obj.db_package = old_obj.db_package + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'location' in class_dict: + res = class_dict['location'](old_obj, trans_dict) + new_obj.db_location = res + elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None: + obj = old_obj.db_location + new_obj.db_add_location(DBLocation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'): + for obj in old_obj.db_deleted_location: + n_obj = DBLocation.update_version(obj, trans_dict) + new_obj.db_deleted_location.append(n_obj) + if 'functions' in class_dict: + res = class_dict['functions'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_function(obj) + elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None: + for obj in old_obj.db_functions: + new_obj.db_add_function(DBFunction.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'): + for obj in old_obj.db_deleted_functions: + n_obj = DBFunction.update_version(obj, trans_dict) + new_obj.db_deleted_functions.append(n_obj) + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and 
old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + if 'controlParameters' in class_dict: + res = class_dict['controlParameters'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_controlParameter(obj) + elif hasattr(old_obj, 'db_controlParameters') and old_obj.db_controlParameters is not None: + for obj in old_obj.db_controlParameters: + new_obj.db_add_controlParameter(DBControlParameter.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_controlParameters') and hasattr(new_obj, 'db_deleted_controlParameters'): + for obj in old_obj.db_deleted_controlParameters: + n_obj = DBControlParameter.update_version(obj, trans_dict) + new_obj.db_deleted_controlParameters.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_location is not None: + children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_location = None + to_del = [] + for child in self.db_functions: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_function(child) + to_del = [] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_controlParameters: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if 
orphan: + to_del.append(child) + for child in to_del: + self.db_delete_controlParameter(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_workflow) + children.extend(self.db_deleted_location) + children.extend(self.db_deleted_functions) + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_controlParameters) + if remove: + self.db_deleted_workflow = [] + self.db_deleted_location = [] + self.db_deleted_functions = [] + self.db_deleted_annotations = [] + self.db_deleted_controlParameters = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_workflow is not None and self._db_workflow.has_changes(): + return True + if self._db_location is not None and self._db_location.has_changes(): + return True + for child in self._db_functions: + if child.has_changes(): + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_controlParameters: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_workflow(self): + return self._db_workflow + def __set_db_workflow(self, workflow): + self._db_workflow = workflow + self.is_dirty = True + db_workflow = property(__get_db_workflow, __set_db_workflow) + def db_add_workflow(self, workflow): + self._db_workflow = workflow + def db_change_workflow(self, workflow): + self._db_workflow = workflow + def db_delete_workflow(self, workflow): + if not self.is_new: + self.db_deleted_workflow.append(self._db_workflow) + self._db_workflow = None + + def __get_db_cache(self): + return 
self._db_cache + def __set_db_cache(self, cache): + self._db_cache = cache + self.is_dirty = True + db_cache = property(__get_db_cache, __set_db_cache) + def db_add_cache(self, cache): + self._db_cache = cache + def db_change_cache(self, cache): + self._db_cache = cache + def db_delete_cache(self, cache): + self._db_cache = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_namespace(self): + return self._db_namespace + def __set_db_namespace(self, namespace): + self._db_namespace = namespace + self.is_dirty = True + db_namespace = property(__get_db_namespace, __set_db_namespace) + def db_add_namespace(self, namespace): + self._db_namespace = namespace + def db_change_namespace(self, namespace): + self._db_namespace = namespace + def db_delete_namespace(self, namespace): + self._db_namespace = None + + def __get_db_package(self): + return self._db_package + def __set_db_package(self, package): + self._db_package = package + self.is_dirty = True + db_package = property(__get_db_package, __set_db_package) + def db_add_package(self, package): + self._db_package = package + def db_change_package(self, package): + self._db_package = package + def db_delete_package(self, package): + self._db_package = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_location(self): + return 
self._db_location + def __set_db_location(self, location): + self._db_location = location + self.is_dirty = True + db_location = property(__get_db_location, __set_db_location) + def db_add_location(self, location): + self._db_location = location + def db_change_location(self, location): + self._db_location = location + def db_delete_location(self, location): + if not self.is_new: + self.db_deleted_location.append(self._db_location) + self._db_location = None + + def __get_db_functions(self): + return self._db_functions + def __set_db_functions(self, functions): + self._db_functions = functions + self.is_dirty = True + db_functions = property(__get_db_functions, __set_db_functions) + def db_get_functions(self): + return self._db_functions + def db_add_function(self, function): + self.is_dirty = True + self._db_functions.append(function) + self.db_functions_id_index[function.db_id] = function + def db_change_function(self, function): + self.is_dirty = True + found = False + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == function.db_id: + self._db_functions[i] = function + found = True + break + if not found: + self._db_functions.append(function) + self.db_functions_id_index[function.db_id] = function + def db_delete_function(self, function): + self.is_dirty = True + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == function.db_id: + if not self._db_functions[i].is_new: + self.db_deleted_functions.append(self._db_functions[i]) + del self._db_functions[i] + break + del self.db_functions_id_index[function.db_id] + def db_get_function(self, key): + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == key: + return self._db_functions[i] + return None + def db_get_function_by_id(self, key): + return self.db_functions_id_index[key] + def db_has_function_with_id(self, key): + return key in self.db_functions_id_index + + def __get_db_annotations(self): + return self._db_annotations + def 
__set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + del self.db_annotations_key_index[annotation.db_key] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + def db_get_annotation_by_key(self, key): + return self.db_annotations_key_index[key] + def db_has_annotation_with_key(self, key): + return key in self.db_annotations_key_index + + def __get_db_controlParameters(self): + return self._db_controlParameters + def __set_db_controlParameters(self, controlParameters): + 
self._db_controlParameters = controlParameters + self.is_dirty = True + db_controlParameters = property(__get_db_controlParameters, __set_db_controlParameters) + def db_get_controlParameters(self): + return self._db_controlParameters + def db_add_controlParameter(self, controlParameter): + self.is_dirty = True + self._db_controlParameters.append(controlParameter) + self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter + self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter + def db_change_controlParameter(self, controlParameter): + self.is_dirty = True + found = False + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == controlParameter.db_id: + self._db_controlParameters[i] = controlParameter + found = True + break + if not found: + self._db_controlParameters.append(controlParameter) + self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter + self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter + def db_delete_controlParameter(self, controlParameter): + self.is_dirty = True + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == controlParameter.db_id: + if not self._db_controlParameters[i].is_new: + self.db_deleted_controlParameters.append(self._db_controlParameters[i]) + del self._db_controlParameters[i] + break + del self.db_controlParameters_id_index[controlParameter.db_id] + del self.db_controlParameters_name_index[controlParameter.db_name] + def db_get_controlParameter(self, key): + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == key: + return self._db_controlParameters[i] + return None + def db_get_controlParameter_by_id(self, key): + return self.db_controlParameters_id_index[key] + def db_has_controlParameter_with_id(self, key): + return key in self.db_controlParameters_id_index + def db_get_controlParameter_by_name(self, key): 
+ return self.db_controlParameters_name_index[key] + def db_has_controlParameter_with_name(self, key): + return key in self.db_controlParameters_name_index + + def getPrimaryKey(self): + return self._db_id + +class DBLog(object): + + vtType = 'log' + + def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, workflow_execs=None, vistrail_id=None): + self._db_id = id + self._db_entity_type = entity_type + self._db_version = version + self._db_name = name + self._db_last_modified = last_modified + self.db_deleted_workflow_execs = [] + self.db_workflow_execs_id_index = {} + if workflow_execs is None: + self._db_workflow_execs = [] + else: + self._db_workflow_execs = workflow_execs + for v in self._db_workflow_execs: + self.db_workflow_execs_id_index[v.db_id] = v + self._db_vistrail_id = vistrail_id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBLog.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBLog(id=self._db_id, + entity_type=self._db_entity_type, + version=self._db_version, + name=self._db_name, + last_modified=self._db_last_modified, + vistrail_id=self._db_vistrail_id) + if self._db_workflow_execs is None: + cp._db_workflow_execs = [] + else: + cp._db_workflow_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_workflow_execs] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap: + cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)] + + # recreate indices and set flags + cp.db_workflow_execs_id_index = dict((v.db_id, v) for v in cp._db_workflow_execs) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + 
@staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBLog() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'entity_type' in class_dict: + res = class_dict['entity_type'](old_obj, trans_dict) + new_obj.db_entity_type = res + elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None: + new_obj.db_entity_type = old_obj.db_entity_type + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'last_modified' in class_dict: + res = class_dict['last_modified'](old_obj, trans_dict) + new_obj.db_last_modified = res + elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None: + new_obj.db_last_modified = old_obj.db_last_modified + if 'workflow_execs' in class_dict: + res = class_dict['workflow_execs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_workflow_exec(obj) + elif hasattr(old_obj, 'db_workflow_execs') and old_obj.db_workflow_execs is not None: + for obj in old_obj.db_workflow_execs: + new_obj.db_add_workflow_exec(DBWorkflowExec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_workflow_execs') and hasattr(new_obj, 'db_deleted_workflow_execs'): + for obj in old_obj.db_deleted_workflow_execs: + n_obj = DBWorkflowExec.update_version(obj, trans_dict) + new_obj.db_deleted_workflow_execs.append(n_obj) + if 'vistrail_id' 
in class_dict: + res = class_dict['vistrail_id'](old_obj, trans_dict) + new_obj.db_vistrail_id = res + elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None: + new_obj.db_vistrail_id = old_obj.db_vistrail_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_workflow_execs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_workflow_exec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_workflow_execs) + if remove: + self.db_deleted_workflow_execs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_workflow_execs: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_entity_type(self): + return self._db_entity_type + def __set_db_entity_type(self, entity_type): + self._db_entity_type = entity_type + self.is_dirty = True + db_entity_type = property(__get_db_entity_type, __set_db_entity_type) + def db_add_entity_type(self, entity_type): + self._db_entity_type = entity_type + def db_change_entity_type(self, entity_type): + self._db_entity_type = entity_type + def db_delete_entity_type(self, entity_type): + self._db_entity_type = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True 
+ db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_last_modified(self): + return self._db_last_modified + def __set_db_last_modified(self, last_modified): + self._db_last_modified = last_modified + self.is_dirty = True + db_last_modified = property(__get_db_last_modified, __set_db_last_modified) + def db_add_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_change_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_delete_last_modified(self, last_modified): + self._db_last_modified = None + + def __get_db_workflow_execs(self): + return self._db_workflow_execs + def __set_db_workflow_execs(self, workflow_execs): + self._db_workflow_execs = workflow_execs + self.is_dirty = True + db_workflow_execs = property(__get_db_workflow_execs, __set_db_workflow_execs) + def db_get_workflow_execs(self): + return self._db_workflow_execs + def db_add_workflow_exec(self, workflow_exec): + self.is_dirty = True + self._db_workflow_execs.append(workflow_exec) + self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec + def db_change_workflow_exec(self, workflow_exec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_workflow_execs)): + if self._db_workflow_execs[i].db_id == workflow_exec.db_id: + self._db_workflow_execs[i] = workflow_exec + found = True + break + if not found: + 
self._db_workflow_execs.append(workflow_exec) + self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec + def db_delete_workflow_exec(self, workflow_exec): + self.is_dirty = True + for i in xrange(len(self._db_workflow_execs)): + if self._db_workflow_execs[i].db_id == workflow_exec.db_id: + if not self._db_workflow_execs[i].is_new: + self.db_deleted_workflow_execs.append(self._db_workflow_execs[i]) + del self._db_workflow_execs[i] + break + del self.db_workflow_execs_id_index[workflow_exec.db_id] + def db_get_workflow_exec(self, key): + for i in xrange(len(self._db_workflow_execs)): + if self._db_workflow_execs[i].db_id == key: + return self._db_workflow_execs[i] + return None + def db_get_workflow_exec_by_id(self, key): + return self.db_workflow_execs_id_index[key] + def db_has_workflow_exec_with_id(self, key): + return key in self.db_workflow_execs_id_index + + def __get_db_vistrail_id(self): + return self._db_vistrail_id + def __set_db_vistrail_id(self, vistrail_id): + self._db_vistrail_id = vistrail_id + self.is_dirty = True + db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id) + def db_add_vistrail_id(self, vistrail_id): + self._db_vistrail_id = vistrail_id + def db_change_vistrail_id(self, vistrail_id): + self._db_vistrail_id = vistrail_id + def db_delete_vistrail_id(self, vistrail_id): + self._db_vistrail_id = None + + def getPrimaryKey(self): + return self._db_id + +class DBLoopIteration(object): + + vtType = 'loop_iteration' + + def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, iteration=None, completed=None, error=None): + self.db_deleted_item_execs = [] + self.db_item_execs_id_index = {} + if item_execs is None: + self._db_item_execs = [] + else: + self._db_item_execs = item_execs + for v in self._db_item_execs: + self.db_item_execs_id_index[v.db_id] = v + self._db_id = id + self._db_ts_start = ts_start + self._db_ts_end = ts_end + self._db_iteration = iteration + self._db_completed = completed + 
self._db_error = error + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBLoopIteration.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBLoopIteration(id=self._db_id, + ts_start=self._db_ts_start, + ts_end=self._db_ts_end, + iteration=self._db_iteration, + completed=self._db_completed, + error=self._db_error) + if self._db_item_execs is None: + cp._db_item_execs = [] + else: + cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBLoopIteration() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'item_execs' in class_dict: + res = class_dict['item_execs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_item_exec(obj) + elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None: + for obj in old_obj.db_item_execs: + if obj.vtType == 'module_exec': + new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict)) + elif obj.vtType == 'group_exec': + new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict)) + elif obj.vtType == 'loop_exec': + new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'): + for obj in old_obj.db_deleted_item_execs: + if obj.vtType == 'module_exec': + n_obj = 
DBModuleExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + elif obj.vtType == 'group_exec': + n_obj = DBGroupExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + elif obj.vtType == 'loop_exec': + n_obj = DBLoopExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'ts_start' in class_dict: + res = class_dict['ts_start'](old_obj, trans_dict) + new_obj.db_ts_start = res + elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None: + new_obj.db_ts_start = old_obj.db_ts_start + if 'ts_end' in class_dict: + res = class_dict['ts_end'](old_obj, trans_dict) + new_obj.db_ts_end = res + elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None: + new_obj.db_ts_end = old_obj.db_ts_end + if 'iteration' in class_dict: + res = class_dict['iteration'](old_obj, trans_dict) + new_obj.db_iteration = res + elif hasattr(old_obj, 'db_iteration') and old_obj.db_iteration is not None: + new_obj.db_iteration = old_obj.db_iteration + if 'completed' in class_dict: + res = class_dict['completed'](old_obj, trans_dict) + new_obj.db_completed = res + elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None: + new_obj.db_completed = old_obj.db_completed + if 'error' in class_dict: + res = class_dict['error'](old_obj, trans_dict) + new_obj.db_error = res + elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None: + new_obj.db_error = old_obj.db_error + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_item_execs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) 
+ if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_item_exec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_item_execs) + if remove: + self.db_deleted_item_execs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_item_execs: + if child.has_changes(): + return True + return False + def __get_db_item_execs(self): + return self._db_item_execs + def __set_db_item_execs(self, item_execs): + self._db_item_execs = item_execs + self.is_dirty = True + db_item_execs = property(__get_db_item_execs, __set_db_item_execs) + def db_get_item_execs(self): + return self._db_item_execs + def db_add_item_exec(self, item_exec): + self.is_dirty = True + self._db_item_execs.append(item_exec) + self.db_item_execs_id_index[item_exec.db_id] = item_exec + def db_change_item_exec(self, item_exec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == item_exec.db_id: + self._db_item_execs[i] = item_exec + found = True + break + if not found: + self._db_item_execs.append(item_exec) + self.db_item_execs_id_index[item_exec.db_id] = item_exec + def db_delete_item_exec(self, item_exec): + self.is_dirty = True + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == item_exec.db_id: + if not self._db_item_execs[i].is_new: + self.db_deleted_item_execs.append(self._db_item_execs[i]) + del self._db_item_execs[i] + break + del self.db_item_execs_id_index[item_exec.db_id] + def db_get_item_exec(self, key): + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == key: + return self._db_item_execs[i] + return None + def db_get_item_exec_by_id(self, key): + return self.db_item_execs_id_index[key] + def db_has_item_exec_with_id(self, key): + return key in self.db_item_execs_id_index + + def 
__get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_ts_start(self): + return self._db_ts_start + def __set_db_ts_start(self, ts_start): + self._db_ts_start = ts_start + self.is_dirty = True + db_ts_start = property(__get_db_ts_start, __set_db_ts_start) + def db_add_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_change_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_delete_ts_start(self, ts_start): + self._db_ts_start = None + + def __get_db_ts_end(self): + return self._db_ts_end + def __set_db_ts_end(self, ts_end): + self._db_ts_end = ts_end + self.is_dirty = True + db_ts_end = property(__get_db_ts_end, __set_db_ts_end) + def db_add_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_change_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_delete_ts_end(self, ts_end): + self._db_ts_end = None + + def __get_db_iteration(self): + return self._db_iteration + def __set_db_iteration(self, iteration): + self._db_iteration = iteration + self.is_dirty = True + db_iteration = property(__get_db_iteration, __set_db_iteration) + def db_add_iteration(self, iteration): + self._db_iteration = iteration + def db_change_iteration(self, iteration): + self._db_iteration = iteration + def db_delete_iteration(self, iteration): + self._db_iteration = None + + def __get_db_completed(self): + return self._db_completed + def __set_db_completed(self, completed): + self._db_completed = completed + self.is_dirty = True + db_completed = property(__get_db_completed, __set_db_completed) + def db_add_completed(self, completed): + self._db_completed = completed + def db_change_completed(self, completed): + self._db_completed = completed + def db_delete_completed(self, completed): + 
self._db_completed = None + + def __get_db_error(self): + return self._db_error + def __set_db_error(self, error): + self._db_error = error + self.is_dirty = True + db_error = property(__get_db_error, __set_db_error) + def db_add_error(self, error): + self._db_error = error + def db_change_error(self, error): + self._db_error = error + def db_delete_error(self, error): + self._db_error = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmAgents(object): + + vtType = 'opm_agents' + + def __init__(self, agents=None): + self.db_deleted_agents = [] + self.db_agents_id_index = {} + if agents is None: + self._db_agents = [] + else: + self._db_agents = agents + for v in self._db_agents: + self.db_agents_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmAgents.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmAgents() + if self._db_agents is None: + cp._db_agents = [] + else: + cp._db_agents = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_agents] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_agents_id_index = dict((v.db_id, v) for v in cp._db_agents) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmAgents() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'agents' in class_dict: + res = class_dict['agents'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_agent(obj) + elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None: + for obj in old_obj.db_agents: + 
new_obj.db_add_agent(DBOpmAgent.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'): + for obj in old_obj.db_deleted_agents: + n_obj = DBOpmAgent.update_version(obj, trans_dict) + new_obj.db_deleted_agents.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_agents: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_agent(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_agents) + if remove: + self.db_deleted_agents = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_agents: + if child.has_changes(): + return True + return False + def __get_db_agents(self): + return self._db_agents + def __set_db_agents(self, agents): + self._db_agents = agents + self.is_dirty = True + db_agents = property(__get_db_agents, __set_db_agents) + def db_get_agents(self): + return self._db_agents + def db_add_agent(self, agent): + self.is_dirty = True + self._db_agents.append(agent) + self.db_agents_id_index[agent.db_id] = agent + def db_change_agent(self, agent): + self.is_dirty = True + found = False + for i in xrange(len(self._db_agents)): + if self._db_agents[i].db_id == agent.db_id: + self._db_agents[i] = agent + found = True + break + if not found: + self._db_agents.append(agent) + self.db_agents_id_index[agent.db_id] = agent + def db_delete_agent(self, agent): + self.is_dirty = True + for i in xrange(len(self._db_agents)): + if self._db_agents[i].db_id == agent.db_id: + if not self._db_agents[i].is_new: + self.db_deleted_agents.append(self._db_agents[i]) + del 
self._db_agents[i] + break + del self.db_agents_id_index[agent.db_id] + def db_get_agent(self, key): + for i in xrange(len(self._db_agents)): + if self._db_agents[i].db_id == key: + return self._db_agents[i] + return None + def db_get_agent_by_id(self, key): + return self.db_agents_id_index[key] + def db_has_agent_with_id(self, key): + return key in self.db_agents_id_index + + + +class DBMashup(object): + + vtType = 'mashup' + + def __init__(self, id=None, name=None, version=None, aliases=None, type=None, vtid=None, layout=None, geometry=None, has_seq=None): + self._db_id = id + self._db_name = name + self._db_version = version + self.db_deleted_aliases = [] + self.db_aliases_id_index = {} + if aliases is None: + self._db_aliases = [] + else: + self._db_aliases = aliases + for v in self._db_aliases: + self.db_aliases_id_index[v.db_id] = v + self._db_type = type + self._db_vtid = vtid + self._db_layout = layout + self._db_geometry = geometry + self._db_has_seq = has_seq + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBMashup.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBMashup(id=self._db_id, + name=self._db_name, + version=self._db_version, + type=self._db_type, + vtid=self._db_vtid, + layout=self._db_layout, + geometry=self._db_geometry, + has_seq=self._db_has_seq) + if self._db_aliases is None: + cp._db_aliases = [] + else: + cp._db_aliases = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_aliases] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_vtid') and ('vistrail', self._db_vtid) in id_remap: + cp._db_vtid = id_remap[('vistrail', self._db_vtid)] + + # recreate indices and set flags + cp.db_aliases_id_index = dict((v.db_id, v) for v in cp._db_aliases) + if 
not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBMashup() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'aliases' in class_dict: + res = class_dict['aliases'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_alias(obj) + elif hasattr(old_obj, 'db_aliases') and old_obj.db_aliases is not None: + for obj in old_obj.db_aliases: + new_obj.db_add_alias(DBMashupAlias.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_aliases') and hasattr(new_obj, 'db_deleted_aliases'): + for obj in old_obj.db_deleted_aliases: + n_obj = DBMashupAlias.update_version(obj, trans_dict) + new_obj.db_deleted_aliases.append(n_obj) + if 'type' in class_dict: + res = class_dict['type'](old_obj, trans_dict) + new_obj.db_type = res + elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None: + new_obj.db_type = old_obj.db_type + if 'vtid' in class_dict: + res = class_dict['vtid'](old_obj, trans_dict) + new_obj.db_vtid = res + elif hasattr(old_obj, 'db_vtid') and old_obj.db_vtid is not None: + new_obj.db_vtid = old_obj.db_vtid + if 'layout' in class_dict: + res = class_dict['layout'](old_obj, trans_dict) + new_obj.db_layout = res + elif 
hasattr(old_obj, 'db_layout') and old_obj.db_layout is not None: + new_obj.db_layout = old_obj.db_layout + if 'geometry' in class_dict: + res = class_dict['geometry'](old_obj, trans_dict) + new_obj.db_geometry = res + elif hasattr(old_obj, 'db_geometry') and old_obj.db_geometry is not None: + new_obj.db_geometry = old_obj.db_geometry + if 'has_seq' in class_dict: + res = class_dict['has_seq'](old_obj, trans_dict) + new_obj.db_has_seq = res + elif hasattr(old_obj, 'db_has_seq') and old_obj.db_has_seq is not None: + new_obj.db_has_seq = old_obj.db_has_seq + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_aliases: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_alias(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_aliases) + if remove: + self.db_deleted_aliases = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_aliases: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = 
None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_aliases(self): + return self._db_aliases + def __set_db_aliases(self, aliases): + self._db_aliases = aliases + self.is_dirty = True + db_aliases = property(__get_db_aliases, __set_db_aliases) + def db_get_aliases(self): + return self._db_aliases + def db_add_alias(self, alias): + self.is_dirty = True + self._db_aliases.append(alias) + self.db_aliases_id_index[alias.db_id] = alias + def db_change_alias(self, alias): + self.is_dirty = True + found = False + for i in xrange(len(self._db_aliases)): + if self._db_aliases[i].db_id == alias.db_id: + self._db_aliases[i] = alias + found = True + break + if not found: + self._db_aliases.append(alias) + self.db_aliases_id_index[alias.db_id] = alias + def db_delete_alias(self, alias): + self.is_dirty = True + for i in xrange(len(self._db_aliases)): + if self._db_aliases[i].db_id == alias.db_id: + if not self._db_aliases[i].is_new: + self.db_deleted_aliases.append(self._db_aliases[i]) + del self._db_aliases[i] + break + del self.db_aliases_id_index[alias.db_id] + def db_get_alias(self, key): + for i in xrange(len(self._db_aliases)): + if self._db_aliases[i].db_id == key: + return self._db_aliases[i] + return None + def db_get_alias_by_id(self, key): + return self.db_aliases_id_index[key] + def db_has_alias_with_id(self, key): + return key in self.db_aliases_id_index + + def __get_db_type(self): + return self._db_type + def __set_db_type(self, type): + self._db_type = type + self.is_dirty = True + db_type = property(__get_db_type, __set_db_type) + def db_add_type(self, type): + self._db_type = type + def 
db_change_type(self, type): + self._db_type = type + def db_delete_type(self, type): + self._db_type = None + + def __get_db_vtid(self): + return self._db_vtid + def __set_db_vtid(self, vtid): + self._db_vtid = vtid + self.is_dirty = True + db_vtid = property(__get_db_vtid, __set_db_vtid) + def db_add_vtid(self, vtid): + self._db_vtid = vtid + def db_change_vtid(self, vtid): + self._db_vtid = vtid + def db_delete_vtid(self, vtid): + self._db_vtid = None + + def __get_db_layout(self): + return self._db_layout + def __set_db_layout(self, layout): + self._db_layout = layout + self.is_dirty = True + db_layout = property(__get_db_layout, __set_db_layout) + def db_add_layout(self, layout): + self._db_layout = layout + def db_change_layout(self, layout): + self._db_layout = layout + def db_delete_layout(self, layout): + self._db_layout = None + + def __get_db_geometry(self): + return self._db_geometry + def __set_db_geometry(self, geometry): + self._db_geometry = geometry + self.is_dirty = True + db_geometry = property(__get_db_geometry, __set_db_geometry) + def db_add_geometry(self, geometry): + self._db_geometry = geometry + def db_change_geometry(self, geometry): + self._db_geometry = geometry + def db_delete_geometry(self, geometry): + self._db_geometry = None + + def __get_db_has_seq(self): + return self._db_has_seq + def __set_db_has_seq(self, has_seq): + self._db_has_seq = has_seq + self.is_dirty = True + db_has_seq = property(__get_db_has_seq, __set_db_has_seq) + def db_add_has_seq(self, has_seq): + self._db_has_seq = has_seq + def db_change_has_seq(self, has_seq): + self._db_has_seq = has_seq + def db_delete_has_seq(self, has_seq): + self._db_has_seq = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmProcessIdCause(object): + + vtType = 'opm_process_id_cause' + + def __init__(self, id=None): + self._db_id = id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmProcessIdCause.do_copy(self) + + def do_copy(self, 
new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmProcessIdCause(id=self._db_id) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap: + cp._db_id = id_remap[('opm_process', self._db_id)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmProcessIdCause() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + + +class DBProvGeneration(object): + + vtType = 'prov_generation' + + def __init__(self, prov_entity=None, prov_activity=None, prov_role=None): + self.db_deleted_prov_entity = [] + self._db_prov_entity = prov_entity + self.db_deleted_prov_activity = [] + self._db_prov_activity = prov_activity + 
self._db_prov_role = prov_role + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBProvGeneration.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBProvGeneration(prov_role=self._db_prov_role) + if self._db_prov_entity is not None: + cp._db_prov_entity = self._db_prov_entity.do_copy(new_ids, id_scope, id_remap) + if self._db_prov_activity is not None: + cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBProvGeneration() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_entity' in class_dict: + res = class_dict['prov_entity'](old_obj, trans_dict) + new_obj.db_prov_entity = res + elif hasattr(old_obj, 'db_prov_entity') and old_obj.db_prov_entity is not None: + obj = old_obj.db_prov_entity + new_obj.db_add_prov_entity(DBRefProvEntity.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_entity') and hasattr(new_obj, 'db_deleted_prov_entity'): + for obj in old_obj.db_deleted_prov_entity: + n_obj = DBRefProvEntity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_entity.append(n_obj) + if 'prov_activity' in class_dict: + res = class_dict['prov_activity'](old_obj, trans_dict) + new_obj.db_prov_activity = res + elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None: + obj = old_obj.db_prov_activity + new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, 
trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'): + for obj in old_obj.db_deleted_prov_activity: + n_obj = DBRefProvActivity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_activity.append(n_obj) + if 'prov_role' in class_dict: + res = class_dict['prov_role'](old_obj, trans_dict) + new_obj.db_prov_role = res + elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None: + new_obj.db_prov_role = old_obj.db_prov_role + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_prov_entity is not None: + children.extend(self._db_prov_entity.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_entity = None + if self._db_prov_activity is not None: + children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_activity = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_prov_entity) + children.extend(self.db_deleted_prov_activity) + if remove: + self.db_deleted_prov_entity = [] + self.db_deleted_prov_activity = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_prov_entity is not None and self._db_prov_entity.has_changes(): + return True + if self._db_prov_activity is not None and self._db_prov_activity.has_changes(): + return True + return False + def __get_db_prov_entity(self): + return self._db_prov_entity + def __set_db_prov_entity(self, prov_entity): + self._db_prov_entity = prov_entity + self.is_dirty = True + db_prov_entity = property(__get_db_prov_entity, __set_db_prov_entity) + def db_add_prov_entity(self, prov_entity): + self._db_prov_entity = prov_entity + def 
db_change_prov_entity(self, prov_entity): + self._db_prov_entity = prov_entity + def db_delete_prov_entity(self, prov_entity): + if not self.is_new: + self.db_deleted_prov_entity.append(self._db_prov_entity) + self._db_prov_entity = None + + def __get_db_prov_activity(self): + return self._db_prov_activity + def __set_db_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + self.is_dirty = True + db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity) + def db_add_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + def db_change_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + def db_delete_prov_activity(self, prov_activity): + if not self.is_new: + self.db_deleted_prov_activity.append(self._db_prov_activity) + self._db_prov_activity = None + + def __get_db_prov_role(self): + return self._db_prov_role + def __set_db_prov_role(self, prov_role): + self._db_prov_role = prov_role + self.is_dirty = True + db_prov_role = property(__get_db_prov_role, __set_db_prov_role) + def db_add_prov_role(self, prov_role): + self._db_prov_role = prov_role + def db_change_prov_role(self, prov_role): + self._db_prov_role = prov_role + def db_delete_prov_role(self, prov_role): + self._db_prov_role = None + + + +class DBPortSpecItem(object): + + vtType = 'portSpecItem' + + def __init__(self, id=None, pos=None, module=None, package=None, namespace=None, label=None, default=None, values=None, entry_type=None): + self._db_id = id + self._db_pos = pos + self._db_module = module + self._db_package = package + self._db_namespace = namespace + self._db_label = label + self._db_default = default + self._db_values = values + self._db_entry_type = entry_type + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBPortSpecItem.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBPortSpecItem(id=self._db_id, + pos=self._db_pos, + 
module=self._db_module, + package=self._db_package, + namespace=self._db_namespace, + label=self._db_label, + default=self._db_default, + values=self._db_values, + entry_type=self._db_entry_type) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBPortSpecItem() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'pos' in class_dict: + res = class_dict['pos'](old_obj, trans_dict) + new_obj.db_pos = res + elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None: + new_obj.db_pos = old_obj.db_pos + if 'module' in class_dict: + res = class_dict['module'](old_obj, trans_dict) + new_obj.db_module = res + elif hasattr(old_obj, 'db_module') and old_obj.db_module is not None: + new_obj.db_module = old_obj.db_module + if 'package' in class_dict: + res = class_dict['package'](old_obj, trans_dict) + new_obj.db_package = res + elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None: + new_obj.db_package = old_obj.db_package + if 'namespace' in class_dict: + res = class_dict['namespace'](old_obj, trans_dict) + new_obj.db_namespace = res + elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None: + new_obj.db_namespace = old_obj.db_namespace + if 'label' in class_dict: + res = class_dict['label'](old_obj, trans_dict) + new_obj.db_label = res + elif 
hasattr(old_obj, 'db_label') and old_obj.db_label is not None: + new_obj.db_label = old_obj.db_label + if 'default' in class_dict: + res = class_dict['default'](old_obj, trans_dict) + new_obj.db_default = res + elif hasattr(old_obj, 'db_default') and old_obj.db_default is not None: + new_obj.db_default = old_obj.db_default + if 'values' in class_dict: + res = class_dict['values'](old_obj, trans_dict) + new_obj.db_values = res + elif hasattr(old_obj, 'db_values') and old_obj.db_values is not None: + new_obj.db_values = old_obj.db_values + if 'entry_type' in class_dict: + res = class_dict['entry_type'](old_obj, trans_dict) + new_obj.db_entry_type = res + elif hasattr(old_obj, 'db_entry_type') and old_obj.db_entry_type is not None: + new_obj.db_entry_type = old_obj.db_entry_type + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_pos(self): + return self._db_pos + def __set_db_pos(self, pos): + self._db_pos = pos + self.is_dirty = True + db_pos = property(__get_db_pos, __set_db_pos) + def db_add_pos(self, pos): + self._db_pos = pos + def db_change_pos(self, pos): + self._db_pos = pos + def db_delete_pos(self, pos): + self._db_pos = None + + def __get_db_module(self): + return self._db_module + def __set_db_module(self, module): + self._db_module = module + self.is_dirty = True + db_module = property(__get_db_module, __set_db_module) + def 
db_add_module(self, module): + self._db_module = module + def db_change_module(self, module): + self._db_module = module + def db_delete_module(self, module): + self._db_module = None + + def __get_db_package(self): + return self._db_package + def __set_db_package(self, package): + self._db_package = package + self.is_dirty = True + db_package = property(__get_db_package, __set_db_package) + def db_add_package(self, package): + self._db_package = package + def db_change_package(self, package): + self._db_package = package + def db_delete_package(self, package): + self._db_package = None + + def __get_db_namespace(self): + return self._db_namespace + def __set_db_namespace(self, namespace): + self._db_namespace = namespace + self.is_dirty = True + db_namespace = property(__get_db_namespace, __set_db_namespace) + def db_add_namespace(self, namespace): + self._db_namespace = namespace + def db_change_namespace(self, namespace): + self._db_namespace = namespace + def db_delete_namespace(self, namespace): + self._db_namespace = None + + def __get_db_label(self): + return self._db_label + def __set_db_label(self, label): + self._db_label = label + self.is_dirty = True + db_label = property(__get_db_label, __set_db_label) + def db_add_label(self, label): + self._db_label = label + def db_change_label(self, label): + self._db_label = label + def db_delete_label(self, label): + self._db_label = None + + def __get_db_default(self): + return self._db_default + def __set_db_default(self, default): + self._db_default = default + self.is_dirty = True + db_default = property(__get_db_default, __set_db_default) + def db_add_default(self, default): + self._db_default = default + def db_change_default(self, default): + self._db_default = default + def db_delete_default(self, default): + self._db_default = None + + def __get_db_values(self): + return self._db_values + def __set_db_values(self, values): + self._db_values = values + self.is_dirty = True + db_values = 
property(__get_db_values, __set_db_values) + def db_add_values(self, values): + self._db_values = values + def db_change_values(self, values): + self._db_values = values + def db_delete_values(self, values): + self._db_values = None + + def __get_db_entry_type(self): + return self._db_entry_type + def __set_db_entry_type(self, entry_type): + self._db_entry_type = entry_type + self.is_dirty = True + db_entry_type = property(__get_db_entry_type, __set_db_entry_type) + def db_add_entry_type(self, entry_type): + self._db_entry_type = entry_type + def db_change_entry_type(self, entry_type): + self._db_entry_type = entry_type + def db_delete_entry_type(self, entry_type): + self._db_entry_type = None + + def getPrimaryKey(self): + return self._db_id + +class DBMachine(object): + + vtType = 'machine' + + def __init__(self, id=None, name=None, os=None, architecture=None, processor=None, ram=None): + self._db_id = id + self._db_name = name + self._db_os = os + self._db_architecture = architecture + self._db_processor = processor + self._db_ram = ram + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBMachine.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBMachine(id=self._db_id, + name=self._db_name, + os=self._db_os, + architecture=self._db_architecture, + processor=self._db_processor, + ram=self._db_ram) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_vistrailId') and ('vistrail', self._db_vistrailId) in id_remap: + cp._db_vistrailId = id_remap[('vistrail', self._db_vistrailId)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj 
is None: + new_obj = DBMachine() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'os' in class_dict: + res = class_dict['os'](old_obj, trans_dict) + new_obj.db_os = res + elif hasattr(old_obj, 'db_os') and old_obj.db_os is not None: + new_obj.db_os = old_obj.db_os + if 'architecture' in class_dict: + res = class_dict['architecture'](old_obj, trans_dict) + new_obj.db_architecture = res + elif hasattr(old_obj, 'db_architecture') and old_obj.db_architecture is not None: + new_obj.db_architecture = old_obj.db_architecture + if 'processor' in class_dict: + res = class_dict['processor'](old_obj, trans_dict) + new_obj.db_processor = res + elif hasattr(old_obj, 'db_processor') and old_obj.db_processor is not None: + new_obj.db_processor = old_obj.db_processor + if 'ram' in class_dict: + res = class_dict['ram'](old_obj, trans_dict) + new_obj.db_ram = res + elif hasattr(old_obj, 'db_ram') and old_obj.db_ram is not None: + new_obj.db_ram = old_obj.db_ram + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + 
def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_os(self): + return self._db_os + def __set_db_os(self, os): + self._db_os = os + self.is_dirty = True + db_os = property(__get_db_os, __set_db_os) + def db_add_os(self, os): + self._db_os = os + def db_change_os(self, os): + self._db_os = os + def db_delete_os(self, os): + self._db_os = None + + def __get_db_architecture(self): + return self._db_architecture + def __set_db_architecture(self, architecture): + self._db_architecture = architecture + self.is_dirty = True + db_architecture = property(__get_db_architecture, __set_db_architecture) + def db_add_architecture(self, architecture): + self._db_architecture = architecture + def db_change_architecture(self, architecture): + self._db_architecture = architecture + def db_delete_architecture(self, architecture): + self._db_architecture = None + + def __get_db_processor(self): + return self._db_processor + def __set_db_processor(self, processor): + self._db_processor = processor + self.is_dirty = True + db_processor = property(__get_db_processor, __set_db_processor) + def db_add_processor(self, processor): + self._db_processor = processor + def db_change_processor(self, processor): + self._db_processor = processor + def db_delete_processor(self, processor): + self._db_processor = None + + def __get_db_ram(self): + return self._db_ram + def __set_db_ram(self, ram): + self._db_ram = ram + self.is_dirty = True + db_ram = property(__get_db_ram, __set_db_ram) + def db_add_ram(self, ram): + self._db_ram = ram + def db_change_ram(self, ram): + self._db_ram = ram + def 
db_delete_ram(self, ram): + self._db_ram = None + + def getPrimaryKey(self): + return self._db_id + +class DBAdd(object): + + vtType = 'add' + + def __init__(self, data=None, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None): + self.db_deleted_data = [] + self._db_data = data + self._db_id = id + self._db_what = what + self._db_objectId = objectId + self._db_parentObjId = parentObjId + self._db_parentObjType = parentObjType + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBAdd.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBAdd(id=self._db_id, + what=self._db_what, + objectId=self._db_objectId, + parentObjId=self._db_parentObjId, + parentObjType=self._db_parentObjType) + if self._db_data is not None: + cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap: + cp._db_objectId = id_remap[(self._db_what, self._db_objectId)] + if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap: + cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBAdd() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'data' in class_dict: + res = class_dict['data'](old_obj, trans_dict) + new_obj.db_data = res + elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None: + obj = 
old_obj.db_data + if obj.vtType == 'module': + new_obj.db_add_data(DBModule.update_version(obj, trans_dict)) + elif obj.vtType == 'location': + new_obj.db_add_data(DBLocation.update_version(obj, trans_dict)) + elif obj.vtType == 'annotation': + new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict)) + elif obj.vtType == 'controlParameter': + new_obj.db_add_data(DBControlParameter.update_version(obj, trans_dict)) + elif obj.vtType == 'function': + new_obj.db_add_data(DBFunction.update_version(obj, trans_dict)) + elif obj.vtType == 'connection': + new_obj.db_add_data(DBConnection.update_version(obj, trans_dict)) + elif obj.vtType == 'port': + new_obj.db_add_data(DBPort.update_version(obj, trans_dict)) + elif obj.vtType == 'parameter': + new_obj.db_add_data(DBParameter.update_version(obj, trans_dict)) + elif obj.vtType == 'portSpec': + new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict)) + elif obj.vtType == 'abstraction': + new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict)) + elif obj.vtType == 'group': + new_obj.db_add_data(DBGroup.update_version(obj, trans_dict)) + elif obj.vtType == 'other': + new_obj.db_add_data(DBOther.update_version(obj, trans_dict)) + elif obj.vtType == 'plugin_data': + new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'): + for obj in old_obj.db_deleted_data: + if obj.vtType == 'module': + n_obj = DBModule.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'location': + n_obj = DBLocation.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'annotation': + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'controlParameter': + n_obj = DBControlParameter.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'function': + n_obj = 
DBFunction.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'connection': + n_obj = DBConnection.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'port': + n_obj = DBPort.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'parameter': + n_obj = DBParameter.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'portSpec': + n_obj = DBPortSpec.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'abstraction': + n_obj = DBAbstraction.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'group': + n_obj = DBGroup.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'other': + n_obj = DBOther.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'plugin_data': + n_obj = DBPluginData.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'what' in class_dict: + res = class_dict['what'](old_obj, trans_dict) + new_obj.db_what = res + elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None: + new_obj.db_what = old_obj.db_what + if 'objectId' in class_dict: + res = class_dict['objectId'](old_obj, trans_dict) + new_obj.db_objectId = res + elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None: + new_obj.db_objectId = old_obj.db_objectId + if 'parentObjId' in class_dict: + res = class_dict['parentObjId'](old_obj, trans_dict) + new_obj.db_parentObjId = res + elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None: + new_obj.db_parentObjId = old_obj.db_parentObjId + if 'parentObjType' in class_dict: 
+ res = class_dict['parentObjType'](old_obj, trans_dict) + new_obj.db_parentObjType = res + elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None: + new_obj.db_parentObjType = old_obj.db_parentObjType + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_data is not None: + children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_data = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_data) + if remove: + self.db_deleted_data = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_data is not None and self._db_data.has_changes(): + return True + return False + def __get_db_data(self): + return self._db_data + def __set_db_data(self, data): + self._db_data = data + self.is_dirty = True + db_data = property(__get_db_data, __set_db_data) + def db_add_data(self, data): + self._db_data = data + def db_change_data(self, data): + self._db_data = data + def db_delete_data(self, data): + if not self.is_new: + self.db_deleted_data.append(self._db_data) + self._db_data = None + + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_what(self): + return self._db_what + def __set_db_what(self, what): + self._db_what = what + self.is_dirty = True + db_what = property(__get_db_what, __set_db_what) + def db_add_what(self, what): + self._db_what = what + def db_change_what(self, what): + self._db_what = what + def db_delete_what(self, 
what): + self._db_what = None + + def __get_db_objectId(self): + return self._db_objectId + def __set_db_objectId(self, objectId): + self._db_objectId = objectId + self.is_dirty = True + db_objectId = property(__get_db_objectId, __set_db_objectId) + def db_add_objectId(self, objectId): + self._db_objectId = objectId + def db_change_objectId(self, objectId): + self._db_objectId = objectId + def db_delete_objectId(self, objectId): + self._db_objectId = None + + def __get_db_parentObjId(self): + return self._db_parentObjId + def __set_db_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + self.is_dirty = True + db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId) + def db_add_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + def db_change_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + def db_delete_parentObjId(self, parentObjId): + self._db_parentObjId = None + + def __get_db_parentObjType(self): + return self._db_parentObjType + def __set_db_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + self.is_dirty = True + db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType) + def db_add_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + def db_change_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + def db_delete_parentObjType(self, parentObjType): + self._db_parentObjType = None + + def getPrimaryKey(self): + return self._db_id + +class DBOther(object): + + vtType = 'other' + + def __init__(self, id=None, key=None, value=None): + self._db_id = id + self._db_key = key + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOther.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOther(id=self._db_id, + key=self._db_key, + value=self._db_value) + + # set new ids + if new_ids: + new_id = 
id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOther() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'key' in class_dict: + res = class_dict['key'](old_obj, trans_dict) + new_obj.db_key = res + elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None: + new_obj.db_key = old_obj.db_key + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_key(self): + return self._db_key + def __set_db_key(self, key): + self._db_key = key + self.is_dirty = True + db_key = property(__get_db_key, 
__set_db_key) + def db_add_key(self, key): + self._db_key = key + def db_change_key(self, key): + self._db_key = key + def db_delete_key(self, key): + self._db_key = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + def getPrimaryKey(self): + return self._db_id + +class DBLocation(object): + + vtType = 'location' + + def __init__(self, id=None, x=None, y=None): + self._db_id = id + self._db_x = x + self._db_y = y + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBLocation.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBLocation(id=self._db_id, + x=self._db_x, + y=self._db_y) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBLocation() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'x' in class_dict: + res = class_dict['x'](old_obj, trans_dict) + new_obj.db_x = res + elif hasattr(old_obj, 'db_x') and old_obj.db_x is not None: + new_obj.db_x = old_obj.db_x + if 'y' in class_dict: + res = 
class_dict['y'](old_obj, trans_dict) + new_obj.db_y = res + elif hasattr(old_obj, 'db_y') and old_obj.db_y is not None: + new_obj.db_y = old_obj.db_y + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_x(self): + return self._db_x + def __set_db_x(self, x): + self._db_x = x + self.is_dirty = True + db_x = property(__get_db_x, __set_db_x) + def db_add_x(self, x): + self._db_x = x + def db_change_x(self, x): + self._db_x = x + def db_delete_x(self, x): + self._db_x = None + + def __get_db_y(self): + return self._db_y + def __set_db_y(self, y): + self._db_y = y + self.is_dirty = True + db_y = property(__get_db_y, __set_db_y) + def db_add_y(self, y): + self._db_y = y + def db_change_y(self, y): + self._db_y = y + def db_delete_y(self, y): + self._db_y = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmOverlaps(object): + + vtType = 'opm_overlaps' + + def __init__(self, opm_account_ids=None): + self.db_deleted_opm_account_ids = [] + if opm_account_ids is None: + self._db_opm_account_ids = [] + else: + self._db_opm_account_ids = opm_account_ids + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmOverlaps.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmOverlaps() + if self._db_opm_account_ids is None: + cp._db_opm_account_ids = [] + else: + 
cp._db_opm_account_ids = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_account_ids] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmOverlaps() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'opm_account_ids' in class_dict: + res = class_dict['opm_account_ids'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_opm_account_id(obj) + elif hasattr(old_obj, 'db_opm_account_ids') and old_obj.db_opm_account_ids is not None: + for obj in old_obj.db_opm_account_ids: + new_obj.db_add_opm_account_id(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_opm_account_ids') and hasattr(new_obj, 'db_deleted_opm_account_ids'): + for obj in old_obj.db_deleted_opm_account_ids: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_opm_account_ids.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_opm_account_ids: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_opm_account_id(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_opm_account_ids) + if remove: + self.db_deleted_opm_account_ids = [] + return 
children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_opm_account_ids: + if child.has_changes(): + return True + return False + def __get_db_opm_account_ids(self): + return self._db_opm_account_ids + def __set_db_opm_account_ids(self, opm_account_ids): + self._db_opm_account_ids = opm_account_ids + self.is_dirty = True + db_opm_account_ids = property(__get_db_opm_account_ids, __set_db_opm_account_ids) + def db_get_opm_account_ids(self): + return self._db_opm_account_ids + def db_add_opm_account_id(self, opm_account_id): + self.is_dirty = True + self._db_opm_account_ids.append(opm_account_id) + def db_change_opm_account_id(self, opm_account_id): + self.is_dirty = True + self._db_opm_account_ids.append(opm_account_id) + def db_delete_opm_account_id(self, opm_account_id): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_opm_account_id(self, key): + return None + + + +class DBPEParameter(object): + + vtType = 'pe_parameter' + + def __init__(self, id=None, pos=None, interpolator=None, value=None, dimension=None): + self._db_id = id + self._db_pos = pos + self._db_interpolator = interpolator + self._db_value = value + self._db_dimension = dimension + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBPEParameter.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBPEParameter(id=self._db_id, + pos=self._db_pos, + interpolator=self._db_interpolator, + value=self._db_value, + dimension=self._db_dimension) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, 
new_obj=None): + if new_obj is None: + new_obj = DBPEParameter() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'pos' in class_dict: + res = class_dict['pos'](old_obj, trans_dict) + new_obj.db_pos = res + elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None: + new_obj.db_pos = old_obj.db_pos + if 'interpolator' in class_dict: + res = class_dict['interpolator'](old_obj, trans_dict) + new_obj.db_interpolator = res + elif hasattr(old_obj, 'db_interpolator') and old_obj.db_interpolator is not None: + new_obj.db_interpolator = old_obj.db_interpolator + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + if 'dimension' in class_dict: + res = class_dict['dimension'](old_obj, trans_dict) + new_obj.db_dimension = res + elif hasattr(old_obj, 'db_dimension') and old_obj.db_dimension is not None: + new_obj.db_dimension = old_obj.db_dimension + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_pos(self): + return self._db_pos + 
def __set_db_pos(self, pos): + self._db_pos = pos + self.is_dirty = True + db_pos = property(__get_db_pos, __set_db_pos) + def db_add_pos(self, pos): + self._db_pos = pos + def db_change_pos(self, pos): + self._db_pos = pos + def db_delete_pos(self, pos): + self._db_pos = None + + def __get_db_interpolator(self): + return self._db_interpolator + def __set_db_interpolator(self, interpolator): + self._db_interpolator = interpolator + self.is_dirty = True + db_interpolator = property(__get_db_interpolator, __set_db_interpolator) + def db_add_interpolator(self, interpolator): + self._db_interpolator = interpolator + def db_change_interpolator(self, interpolator): + self._db_interpolator = interpolator + def db_delete_interpolator(self, interpolator): + self._db_interpolator = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + def __get_db_dimension(self): + return self._db_dimension + def __set_db_dimension(self, dimension): + self._db_dimension = dimension + self.is_dirty = True + db_dimension = property(__get_db_dimension, __set_db_dimension) + def db_add_dimension(self, dimension): + self._db_dimension = dimension + def db_change_dimension(self, dimension): + self._db_dimension = dimension + def db_delete_dimension(self, dimension): + self._db_dimension = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmDependencies(object): + + vtType = 'opm_dependencies' + + def __init__(self, dependencys=None): + self.db_deleted_dependencys = [] + if dependencys is None: + self._db_dependencys = [] + else: + self._db_dependencys = dependencys + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return 
DBOpmDependencies.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmDependencies() + if self._db_dependencys is None: + cp._db_dependencys = [] + else: + cp._db_dependencys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_dependencys] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmDependencies() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'dependencys' in class_dict: + res = class_dict['dependencys'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_dependency(obj) + elif hasattr(old_obj, 'db_dependencys') and old_obj.db_dependencys is not None: + for obj in old_obj.db_dependencys: + if obj.vtType == 'opm_used': + new_obj.db_add_dependency(DBOpmUsed.update_version(obj, trans_dict)) + elif obj.vtType == 'opm_was_generated_by': + new_obj.db_add_dependency(DBOpmWasGeneratedBy.update_version(obj, trans_dict)) + elif obj.vtType == 'opm_was_triggered_by': + new_obj.db_add_dependency(DBOpmWasTriggeredBy.update_version(obj, trans_dict)) + elif obj.vtType == 'opm_was_derived_from': + new_obj.db_add_dependency(DBOpmWasDerivedFrom.update_version(obj, trans_dict)) + elif obj.vtType == 'opm_was_controlled_by': + new_obj.db_add_dependency(DBOpmWasControlledBy.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_dependencys') and hasattr(new_obj, 'db_deleted_dependencys'): + for obj in old_obj.db_deleted_dependencys: + if obj.vtType == 'opm_used': + n_obj = DBOpmUsed.update_version(obj, trans_dict) 
+ new_obj.db_deleted_dependencys.append(n_obj) + elif obj.vtType == 'opm_was_generated_by': + n_obj = DBOpmWasGeneratedBy.update_version(obj, trans_dict) + new_obj.db_deleted_dependencys.append(n_obj) + elif obj.vtType == 'opm_was_triggered_by': + n_obj = DBOpmWasTriggeredBy.update_version(obj, trans_dict) + new_obj.db_deleted_dependencys.append(n_obj) + elif obj.vtType == 'opm_was_derived_from': + n_obj = DBOpmWasDerivedFrom.update_version(obj, trans_dict) + new_obj.db_deleted_dependencys.append(n_obj) + elif obj.vtType == 'opm_was_controlled_by': + n_obj = DBOpmWasControlledBy.update_version(obj, trans_dict) + new_obj.db_deleted_dependencys.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_dependencys: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_dependency(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_dependencys) + if remove: + self.db_deleted_dependencys = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_dependencys: + if child.has_changes(): + return True + return False + def __get_db_dependencys(self): + return self._db_dependencys + def __set_db_dependencys(self, dependencys): + self._db_dependencys = dependencys + self.is_dirty = True + db_dependencys = property(__get_db_dependencys, __set_db_dependencys) + def db_get_dependencys(self): + return self._db_dependencys + def db_add_dependency(self, dependency): + self.is_dirty = True + self._db_dependencys.append(dependency) + def db_change_dependency(self, dependency): + self.is_dirty = True + self._db_dependencys.append(dependency) + def 
db_delete_dependency(self, dependency): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_dependency(self, key): + return None + + + +class DBParameter(object): + + vtType = 'parameter' + + def __init__(self, id=None, pos=None, name=None, type=None, val=None, alias=None): + self._db_id = id + self._db_pos = pos + self._db_name = name + self._db_type = type + self._db_val = val + self._db_alias = alias + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBParameter.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBParameter(id=self._db_id, + pos=self._db_pos, + name=self._db_name, + type=self._db_type, + val=self._db_val, + alias=self._db_alias) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBParameter() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'pos' in class_dict: + res = class_dict['pos'](old_obj, trans_dict) + new_obj.db_pos = res + elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None: + new_obj.db_pos = old_obj.db_pos + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'type' in class_dict: + res = 
class_dict['type'](old_obj, trans_dict) + new_obj.db_type = res + elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None: + new_obj.db_type = old_obj.db_type + if 'val' in class_dict: + res = class_dict['val'](old_obj, trans_dict) + new_obj.db_val = res + elif hasattr(old_obj, 'db_val') and old_obj.db_val is not None: + new_obj.db_val = old_obj.db_val + if 'alias' in class_dict: + res = class_dict['alias'](old_obj, trans_dict) + new_obj.db_alias = res + elif hasattr(old_obj, 'db_alias') and old_obj.db_alias is not None: + new_obj.db_alias = old_obj.db_alias + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_pos(self): + return self._db_pos + def __set_db_pos(self, pos): + self._db_pos = pos + self.is_dirty = True + db_pos = property(__get_db_pos, __set_db_pos) + def db_add_pos(self, pos): + self._db_pos = pos + def db_change_pos(self, pos): + self._db_pos = pos + def db_delete_pos(self, pos): + self._db_pos = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_type(self): + return self._db_type + def 
__set_db_type(self, type): + self._db_type = type + self.is_dirty = True + db_type = property(__get_db_type, __set_db_type) + def db_add_type(self, type): + self._db_type = type + def db_change_type(self, type): + self._db_type = type + def db_delete_type(self, type): + self._db_type = None + + def __get_db_val(self): + return self._db_val + def __set_db_val(self, val): + self._db_val = val + self.is_dirty = True + db_val = property(__get_db_val, __set_db_val) + def db_add_val(self, val): + self._db_val = val + def db_change_val(self, val): + self._db_val = val + def db_delete_val(self, val): + self._db_val = None + + def __get_db_alias(self): + return self._db_alias + def __set_db_alias(self, alias): + self._db_alias = alias + self.is_dirty = True + db_alias = property(__get_db_alias, __set_db_alias) + def db_add_alias(self, alias): + self._db_alias = alias + def db_change_alias(self, alias): + self._db_alias = alias + def db_delete_alias(self, alias): + self._db_alias = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmUsed(object): + + vtType = 'opm_used' + + def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None): + self.db_deleted_effect = [] + self._db_effect = effect + self.db_deleted_role = [] + self._db_role = role + self.db_deleted_cause = [] + self._db_cause = cause + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.db_deleted_opm_times = [] + if opm_times is None: + self._db_opm_times = [] + else: + self._db_opm_times = opm_times + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmUsed.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmUsed() + if self._db_effect is not None: + cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap) + if self._db_role is not None: + cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap) + if 
self._db_cause is not None: + cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + if self._db_opm_times is None: + cp._db_opm_times = [] + else: + cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmUsed() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'effect' in class_dict: + res = class_dict['effect'](old_obj, trans_dict) + new_obj.db_effect = res + elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None: + obj = old_obj.db_effect + new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'): + for obj in old_obj.db_deleted_effect: + n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict) + new_obj.db_deleted_effect.append(n_obj) + if 'role' in class_dict: + res = class_dict['role'](old_obj, trans_dict) + new_obj.db_role = res + elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None: + obj = old_obj.db_role + new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'): + for obj in old_obj.db_deleted_role: + n_obj = DBOpmRole.update_version(obj, trans_dict) + new_obj.db_deleted_role.append(n_obj) + if 
'cause' in class_dict: + res = class_dict['cause'](old_obj, trans_dict) + new_obj.db_cause = res + elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None: + obj = old_obj.db_cause + new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'): + for obj in old_obj.db_deleted_cause: + n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict) + new_obj.db_deleted_cause.append(n_obj) + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'opm_times' in class_dict: + res = class_dict['opm_times'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_opm_time(obj) + elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None: + for obj in old_obj.db_opm_times: + new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'): + for obj in old_obj.db_deleted_opm_times: + n_obj = DBOpmTime.update_version(obj, trans_dict) + new_obj.db_deleted_opm_times.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_effect is not None: + children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_effect = None + if self._db_role is not None: + 
children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_role = None + if self._db_cause is not None: + children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_cause = None + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + to_del = [] + for child in self.db_opm_times: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_opm_time(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_effect) + children.extend(self.db_deleted_role) + children.extend(self.db_deleted_cause) + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_opm_times) + if remove: + self.db_deleted_effect = [] + self.db_deleted_role = [] + self.db_deleted_cause = [] + self.db_deleted_accounts = [] + self.db_deleted_opm_times = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_effect is not None and self._db_effect.has_changes(): + return True + if self._db_role is not None and self._db_role.has_changes(): + return True + if self._db_cause is not None and self._db_cause.has_changes(): + return True + for child in self._db_accounts: + if child.has_changes(): + return True + for child in self._db_opm_times: + if child.has_changes(): + return True + return False + def __get_db_effect(self): + return self._db_effect + def __set_db_effect(self, effect): + self._db_effect = effect + self.is_dirty = True + db_effect = property(__get_db_effect, __set_db_effect) + def db_add_effect(self, effect): + self._db_effect = effect + def 
db_change_effect(self, effect): + self._db_effect = effect + def db_delete_effect(self, effect): + if not self.is_new: + self.db_deleted_effect.append(self._db_effect) + self._db_effect = None + + def __get_db_role(self): + return self._db_role + def __set_db_role(self, role): + self._db_role = role + self.is_dirty = True + db_role = property(__get_db_role, __set_db_role) + def db_add_role(self, role): + self._db_role = role + def db_change_role(self, role): + self._db_role = role + def db_delete_role(self, role): + if not self.is_new: + self.db_deleted_role.append(self._db_role) + self._db_role = None + + def __get_db_cause(self): + return self._db_cause + def __set_db_cause(self, cause): + self._db_cause = cause + self.is_dirty = True + db_cause = property(__get_db_cause, __set_db_cause) + def db_add_cause(self, cause): + self._db_cause = cause + def db_change_cause(self, cause): + self._db_cause = cause + def db_delete_cause(self, cause): + if not self.is_new: + self.db_deleted_cause.append(self._db_cause) + self._db_cause = None + + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def __get_db_opm_times(self): + return self._db_opm_times + def __set_db_opm_times(self, opm_times): + self._db_opm_times = opm_times + self.is_dirty = True + db_opm_times = property(__get_db_opm_times, __set_db_opm_times) + def db_get_opm_times(self): + return self._db_opm_times + def db_add_opm_time(self, 
opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_change_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_delete_opm_time(self, opm_time): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_opm_time(self, key): + return None + + + +class DBPluginData(object): + + vtType = 'plugin_data' + + def __init__(self, id=None, data=None): + self._db_id = id + self._db_data = data + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBPluginData.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBPluginData(id=self._db_id, + data=self._db_data) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBPluginData() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'data' in class_dict: + res = class_dict['data'](old_obj, trans_dict) + new_obj.db_data = res + elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None: + new_obj.db_data = old_obj.db_data + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + 
class DBFunction(object):
    """Auto-generated database object for a function set on a module.

    A function has scalar ``id``, ``pos`` and ``name`` fields and owns an
    ordered list of DBParameter children.  The children are mirrored in
    ``db_parameters_id_index`` (parameter id -> parameter), which the
    mutators below must keep in sync with ``_db_parameters``.
    NOTE(review): the ``xrange`` calls indicate this file targets Python 2.
    """

    vtType = 'function'

    def __init__(self, id=None, pos=None, name=None, parameters=None):
        self._db_id = id
        self._db_pos = pos
        self._db_name = name
        # Parameters removed after being persisted are queued here so the
        # database layer can issue the corresponding deletes.
        self.db_deleted_parameters = []
        self.db_parameters_id_index = {}
        if parameters is None:
            self._db_parameters = []
        else:
            self._db_parameters = parameters
            for v in self._db_parameters:
                self.db_parameters_id_index[v.db_id] = v
        # Freshly constructed objects start flagged as unsaved and modified.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBFunction.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids=True, allocate fresh ids from id_scope and
        # record old->new pairs in id_remap for later reference fix-up.
        cp = DBFunction(id=self._db_id,
                        pos=self._db_pos,
                        name=self._db_name)
        if self._db_parameters is None:
            cp._db_parameters = []
        else:
            cp._db_parameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameters]

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id

        # recreate indices and set flags
        cp.db_parameters_id_index = dict((v.db_id, v) for v in cp._db_parameters)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj to this schema version.  trans_dict may carry a
        # per-field converter dict keyed by this class's name; fields without
        # a converter are copied over unchanged.
        if new_obj is None:
            new_obj = DBFunction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'pos' in class_dict:
            res = class_dict['pos'](old_obj, trans_dict)
            new_obj.db_pos = res
        elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
            new_obj.db_pos = old_obj.db_pos
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'parameters' in class_dict:
            res = class_dict['parameters'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_parameter(obj)
        elif hasattr(old_obj, 'db_parameters') and old_obj.db_parameters is not None:
            for obj in old_obj.db_parameters:
                new_obj.db_add_parameter(DBParameter.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
            for obj in old_obj.db_deleted_parameters:
                n_obj = DBParameter.update_version(obj, trans_dict)
                new_obj.db_deleted_parameters.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Post-order traversal: parameters first, then this function itself.
        # With orphan=True the visited children are detached from this object.
        children = []
        to_del = []
        for child in self.db_parameters:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_parameter(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Report (and with remove=True, clear) the pending parameter deletes.
        children = []
        children.extend(self.db_deleted_parameters)
        if remove:
            self.db_deleted_parameters = []
        return children
    def has_changes(self):
        # Dirty if this object or any child parameter has changed.
        if self.is_dirty:
            return True
        for child in self._db_parameters:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # Property setter marks the object dirty; db_add_/db_change_ do not.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_pos(self):
        return self._db_pos
    def __set_db_pos(self, pos):
        self._db_pos = pos
        self.is_dirty = True
    db_pos = property(__get_db_pos, __set_db_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    def db_change_pos(self, pos):
        self._db_pos = pos
    def db_delete_pos(self, pos):
        self._db_pos = None

    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None

    def __get_db_parameters(self):
        return self._db_parameters
    def __set_db_parameters(self, parameters):
        # NOTE(review): bulk assignment does NOT rebuild
        # db_parameters_id_index — callers appear expected to use
        # db_add_/db_change_/db_delete_parameter to keep the index in sync.
        self._db_parameters = parameters
        self.is_dirty = True
    db_parameters = property(__get_db_parameters, __set_db_parameters)
    def db_get_parameters(self):
        return self._db_parameters
    def db_add_parameter(self, parameter):
        self.is_dirty = True
        self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_change_parameter(self, parameter):
        # Replace in place by matching id, or append when absent; the id
        # index is updated in either case.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == parameter.db_id:
                self._db_parameters[i] = parameter
                found = True
                break
        if not found:
            self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_delete_parameter(self, parameter):
        # Persisted (non-new) parameters are queued on db_deleted_parameters
        # before removal.  Raises KeyError if the id is not in the index.
        self.is_dirty = True
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == parameter.db_id:
                if not self._db_parameters[i].is_new:
                    self.db_deleted_parameters.append(self._db_parameters[i])
                del self._db_parameters[i]
                break
        del self.db_parameters_id_index[parameter.db_id]
    def db_get_parameter(self, key):
        # Linear scan by id; returns None when absent (unlike the indexed
        # lookup below, which raises).
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == key:
                return self._db_parameters[i]
        return None
    def db_get_parameter_by_id(self, key):
        return self.db_parameters_id_index[key]
    def db_has_parameter_with_id(self, key):
        return key in self.db_parameters_id_index

    def getPrimaryKey(self):
        return self._db_id
class DBActionAnnotation(object):
    """Schema object for an annotation attached to a vistrail action.

    Carries six scalar fields (id, key, value, action_id, date, user), each
    exposed as a ``db_<field>`` property whose setter flips ``is_dirty``,
    plus the generated add/change/delete helper triple per field.
    """

    vtType = 'actionAnnotation'

    def __init__(self, id=None, key=None, value=None, action_id=None, date=None, user=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self._db_action_id = action_id
        self._db_date = date
        self._db_user = user
        # A freshly constructed object is unsaved and modified by definition.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBActionAnnotation.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this annotation; with new_ids, allocate a fresh id from
        id_scope and record the old->new pair in id_remap."""
        cp = DBActionAnnotation(id=self._db_id,
                                key=self._db_key,
                                value=self._db_value,
                                action_id=self._db_action_id,
                                date=self._db_date,
                                user=self._db_user)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap[self.vtType] if self.vtType in id_scope.remap else self.vtType
            id_remap[(remap_type, self.db_id)] = fresh
            cp.db_id = fresh
            # Re-point the action reference if its owner was already remapped.
            if hasattr(self, 'db_action_id') and ('action', self._db_action_id) in id_remap:
                cp._db_action_id = id_remap[('action', self._db_action_id)]
        else:
            # Plain copies keep the original dirty/new bookkeeping.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across schema versions, one scalar at a time.

        trans_dict may supply a per-field converter dict keyed by this
        class's name; fields without a converter copy over unchanged.
        """
        if new_obj is None:
            new_obj = DBActionAnnotation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'key', 'value', 'action_id', 'date', 'user'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf node: the traversal yields only this object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # No child collections, hence nothing deleted to report.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # --- generated scalar accessors -------------------------------------

    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def _get_key(self):
        return self._db_key
    def _set_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(_get_key, _set_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None

    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    def _get_action_id(self):
        return self._db_action_id
    def _set_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(_get_action_id, _set_action_id)
    def db_add_action_id(self, action_id):
        self._db_action_id = action_id
    def db_change_action_id(self, action_id):
        self._db_action_id = action_id
    def db_delete_action_id(self, action_id):
        self._db_action_id = None

    def _get_date(self):
        return self._db_date
    def _set_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(_get_date, _set_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None

    def _get_user(self):
        return self._db_user
    def _set_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(_get_user, _set_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None

    def getPrimaryKey(self):
        return self._db_id
class DBAbstraction(object):
    """Auto-generated database object for an abstraction (subworkflow) module.

    Scalar fields: id, cache, name, namespace, package, version,
    internal_version.  One optional DBLocation child plus three owned
    collections -- functions, annotations and controlParameters -- each
    mirrored by id-keyed index dicts (annotations also by key,
    controlParameters also by name).  The mutators below must keep every
    index in sync with its backing list.
    NOTE(review): ``xrange`` below indicates this file targets Python 2.
    """

    vtType = 'abstraction'

    def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, internal_version=None, location=None, functions=None, annotations=None, controlParameters=None):
        self._db_id = id
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self._db_internal_version = internal_version
        # Deleted children are queued per collection so the database layer
        # can issue the corresponding deletes on save.
        self.db_deleted_location = []
        self._db_location = location
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
            for v in self._db_functions:
                self.db_functions_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
                self.db_annotations_key_index[v.db_key] = v
        self.db_deleted_controlParameters = []
        self.db_controlParameters_id_index = {}
        self.db_controlParameters_name_index = {}
        if controlParameters is None:
            self._db_controlParameters = []
        else:
            self._db_controlParameters = controlParameters
            for v in self._db_controlParameters:
                self.db_controlParameters_id_index[v.db_id] = v
                self.db_controlParameters_name_index[v.db_name] = v
        # Freshly constructed objects start flagged as unsaved and modified.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBAbstraction.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids=True, allocate fresh ids from id_scope and
        # record old->new pairs in id_remap.
        cp = DBAbstraction(id=self._db_id,
                           cache=self._db_cache,
                           name=self._db_name,
                           namespace=self._db_namespace,
                           package=self._db_package,
                           version=self._db_version,
                           internal_version=self._db_internal_version)
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_controlParameters is None:
            cp._db_controlParameters = []
        else:
            cp._db_controlParameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_controlParameters]

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id

        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        cp.db_controlParameters_id_index = dict((v.db_id, v) for v in cp._db_controlParameters)
        cp.db_controlParameters_name_index = dict((v.db_name, v) for v in cp._db_controlParameters)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj to this schema version.  trans_dict may carry a
        # per-field converter dict keyed by this class's name; fields without
        # a converter are copied (children recursively via update_version).
        if new_obj is None:
            new_obj = DBAbstraction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'cache' in class_dict:
            res = class_dict['cache'](old_obj, trans_dict)
            new_obj.db_cache = res
        elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
            new_obj.db_cache = old_obj.db_cache
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'internal_version' in class_dict:
            res = class_dict['internal_version'](old_obj, trans_dict)
            new_obj.db_internal_version = res
        elif hasattr(old_obj, 'db_internal_version') and old_obj.db_internal_version is not None:
            new_obj.db_internal_version = old_obj.db_internal_version
        if 'location' in class_dict:
            res = class_dict['location'](old_obj, trans_dict)
            new_obj.db_location = res
        elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
            obj = old_obj.db_location
            new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for obj in old_obj.db_deleted_location:
                n_obj = DBLocation.update_version(obj, trans_dict)
                new_obj.db_deleted_location.append(n_obj)
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'controlParameters' in class_dict:
            res = class_dict['controlParameters'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_controlParameter(obj)
        elif hasattr(old_obj, 'db_controlParameters') and old_obj.db_controlParameters is not None:
            for obj in old_obj.db_controlParameters:
                new_obj.db_add_controlParameter(DBControlParameter.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_controlParameters') and hasattr(new_obj, 'db_deleted_controlParameters'):
            for obj in old_obj.db_deleted_controlParameters:
                n_obj = DBControlParameter.update_version(obj, trans_dict)
                new_obj.db_deleted_controlParameters.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Post-order traversal of location, functions, annotations and
        # control parameters, then this object.  With orphan=True the
        # visited children are detached as they are collected.
        children = []
        if self._db_location is not None:
            children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_location = None
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_controlParameters:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_controlParameter(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Report (and with remove=True, clear) all pending child deletions.
        children = []
        children.extend(self.db_deleted_location)
        children.extend(self.db_deleted_functions)
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_controlParameters)
        if remove:
            self.db_deleted_location = []
            self.db_deleted_functions = []
            self.db_deleted_annotations = []
            self.db_deleted_controlParameters = []
        return children
    def has_changes(self):
        # Dirty if this object or any owned child has changed.
        if self.is_dirty:
            return True
        if self._db_location is not None and self._db_location.has_changes():
            return True
        for child in self._db_functions:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_controlParameters:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # Property setters mark the object dirty; db_add_/db_change_ do not.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache):
        self._db_cache = cache
    def db_change_cache(self, cache):
        self._db_cache = cache
    def db_delete_cache(self, cache):
        self._db_cache = None

    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None

    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None

    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None

    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None

    def __get_db_internal_version(self):
        return self._db_internal_version
    def __set_db_internal_version(self, internal_version):
        self._db_internal_version = internal_version
        self.is_dirty = True
    db_internal_version = property(__get_db_internal_version, __set_db_internal_version)
    def db_add_internal_version(self, internal_version):
        self._db_internal_version = internal_version
    def db_change_internal_version(self, internal_version):
        self._db_internal_version = internal_version
    def db_delete_internal_version(self, internal_version):
        self._db_internal_version = None

    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location):
        self._db_location = location
    def db_change_location(self, location):
        self._db_location = location
    def db_delete_location(self, location):
        # Persisted (non-new) locations are queued for a database delete.
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None

    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        # NOTE(review): bulk assignment does NOT rebuild the id index.
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        # Replace in place by id, or append when absent; index updated too.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        # Queues persisted functions on db_deleted_functions before removal;
        # raises KeyError if the id is not in the index.
        self.is_dirty = True
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                if not self._db_functions[i].is_new:
                    self.db_deleted_functions.append(self._db_functions[i])
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == key:
                return self._db_functions[i]
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index

    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        # NOTE(review): bulk assignment does NOT rebuild the id/key indices.
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        # Removes from the list and from BOTH indices (id and key).
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index

    def __get_db_controlParameters(self):
        return self._db_controlParameters
    def __set_db_controlParameters(self, controlParameters):
        # NOTE(review): bulk assignment does NOT rebuild the id/name indices.
        self._db_controlParameters = controlParameters
        self.is_dirty = True
    db_controlParameters = property(__get_db_controlParameters, __set_db_controlParameters)
    def db_get_controlParameters(self):
        return self._db_controlParameters
    def db_add_controlParameter(self, controlParameter):
        self.is_dirty = True
        self._db_controlParameters.append(controlParameter)
        self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter
        self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter
    def db_change_controlParameter(self, controlParameter):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_controlParameters)):
            if self._db_controlParameters[i].db_id == controlParameter.db_id:
                self._db_controlParameters[i] = controlParameter
                found = True
                break
        if not found:
            self._db_controlParameters.append(controlParameter)
        self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter
        self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter
    def db_delete_controlParameter(self, controlParameter):
        # Removes from the list and from BOTH indices (id and name).
        self.is_dirty = True
        for i in xrange(len(self._db_controlParameters)):
            if self._db_controlParameters[i].db_id == controlParameter.db_id:
                if not self._db_controlParameters[i].is_new:
                    self.db_deleted_controlParameters.append(self._db_controlParameters[i])
                del self._db_controlParameters[i]
                break
        del self.db_controlParameters_id_index[controlParameter.db_id]
        del self.db_controlParameters_name_index[controlParameter.db_name]
    def db_get_controlParameter(self, key):
        for i in xrange(len(self._db_controlParameters)):
            if self._db_controlParameters[i].db_id == key:
                return self._db_controlParameters[i]
        return None
    def db_get_controlParameter_by_id(self, key):
        return self.db_controlParameters_id_index[key]
    def db_has_controlParameter_with_id(self, key):
        return key in self.db_controlParameters_id_index
    def db_get_controlParameter_by_name(self, key):
        return self.db_controlParameters_name_index[key]
    def db_has_controlParameter_with_name(self, key):
        return key in self.db_controlParameters_name_index

    def getPrimaryKey(self):
        return self._db_id
class DBMashupAlias(object):
    """Schema object for a mashup alias: an id, a name and one optional
    DBMashupComponent child held in ``db_component``."""

    vtType = 'mashup_alias'

    def __init__(self, id=None, name=None, component=None):
        self._db_id = id
        self._db_name = name
        # A deleted component is remembered here so persistence can issue
        # the corresponding delete on save.
        self.db_deleted_component = []
        self._db_component = component
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBMashupAlias.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this alias (and its component, recursively); with new_ids,
        allocate a fresh id from id_scope and record it in id_remap."""
        cp = DBMashupAlias(id=self._db_id,
                           name=self._db_name)
        if self._db_component is not None:
            cp._db_component = self._db_component.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap[self.vtType] if self.vtType in id_scope.remap else self.vtType
            id_remap[(remap_type, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            # Plain copies keep the original dirty/new bookkeeping.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across schema versions; scalar fields copy over,
        the component child is translated recursively."""
        if new_obj is None:
            new_obj = DBMashupAlias()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'name'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        if 'component' in class_dict:
            new_obj.db_component = class_dict['component'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_component', None) is not None:
            new_obj.db_add_component(
                DBMashupComponent.update_version(old_obj.db_component, trans_dict))
        if hasattr(old_obj, 'db_deleted_component') and hasattr(new_obj, 'db_deleted_component'):
            for child in old_obj.db_deleted_component:
                new_obj.db_deleted_component.append(
                    DBMashupComponent.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        out = []
        if self._db_component is not None:
            out.extend(self._db_component.db_children((self.vtType, self.db_id),
                                                      orphan, for_action))
            if orphan:
                # Caller takes ownership of the child; detach it here.
                self._db_component = None
        out.append((self, parent[0], parent[1]))
        return out

    def db_deleted_children(self, remove=False):
        out = list(self.db_deleted_component)
        if remove:
            self.db_deleted_component = []
        return out

    def has_changes(self):
        if self.is_dirty:
            return True
        return self._db_component is not None and self._db_component.has_changes()

    # --- generated accessors --------------------------------------------

    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def _get_name(self):
        return self._db_name
    def _set_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(_get_name, _set_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None

    def _get_component(self):
        return self._db_component
    def _set_component(self, component):
        self._db_component = component
        self.is_dirty = True
    db_component = property(_get_component, _set_component)
    def db_add_component(self, component):
        self._db_component = component
    def db_change_component(self, component):
        self._db_component = component
    def db_delete_component(self, component):
        # Only persisted (non-new) components need a deletion record.
        if not self.is_new:
            self.db_deleted_component.append(self._db_component)
        self._db_component = None

    def getPrimaryKey(self):
        return self._db_id
class DBWorkflow(object):
    """Auto-generated persistence object for a 'workflow' entity.

    Holds scalar fields (id, entity_type, name, version, last_modified,
    vistrail_id) plus five child collections (modules, connections,
    annotations, plugin_datas, others).  Each collection is mirrored by a
    ``db_<name>_id_index`` dict for O(1) lookup by child id, and a
    ``db_deleted_<name>`` list that records removed children until the db
    layer flushes them.

    NOTE(review): this class follows the generated DB* template (presumably
    emitted by the VisTrails schema generator — confirm before hand-editing);
    the db layer relies on the exact is_dirty/is_new and deleted-children
    bookkeeping below.
    """

    vtType = 'workflow'

    def __init__(self, modules=None, id=None, entity_type=None, name=None, version=None, last_modified=None, connections=None, annotations=None, plugin_datas=None, others=None, vistrail_id=None):
        # Each child collection keeps: a deleted-list, an id index, and the
        # backing list itself (the index is populated when a list is passed in).
        self.db_deleted_modules = []
        self.db_modules_id_index = {}
        if modules is None:
            self._db_modules = []
        else:
            self._db_modules = modules
            for v in self._db_modules:
                self.db_modules_id_index[v.db_id] = v
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_name = name
        self._db_version = version
        self._db_last_modified = last_modified
        self.db_deleted_connections = []
        self.db_connections_id_index = {}
        if connections is None:
            self._db_connections = []
        else:
            self._db_connections = connections
            for v in self._db_connections:
                self.db_connections_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        self.db_deleted_plugin_datas = []
        self.db_plugin_datas_id_index = {}
        if plugin_datas is None:
            self._db_plugin_datas = []
        else:
            self._db_plugin_datas = plugin_datas
            for v in self._db_plugin_datas:
                self.db_plugin_datas_id_index[v.db_id] = v
        self.db_deleted_others = []
        self.db_others_id_index = {}
        if others is None:
            self._db_others = []
        else:
            self._db_others = others
            for v in self._db_others:
                self.db_others_id_index[v.db_id] = v
        self._db_vistrail_id = vistrail_id
        # freshly constructed objects start dirty/new so the next save writes them
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBWorkflow.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this workflow and all children.

        When new_ids is True, fresh ids are drawn from id_scope and the
        old->new mapping is recorded in id_remap (also used to retarget the
        vistrail_id foreign key if its source was remapped).
        """
        cp = DBWorkflow(id=self._db_id,
                        entity_type=self._db_entity_type,
                        name=self._db_name,
                        version=self._db_version,
                        last_modified=self._db_last_modified,
                        vistrail_id=self._db_vistrail_id)
        if self._db_modules is None:
            cp._db_modules = []
        else:
            cp._db_modules = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_modules]
        if self._db_connections is None:
            cp._db_connections = []
        else:
            cp._db_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_connections]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_plugin_datas is None:
            cp._db_plugin_datas = []
        else:
            cp._db_plugin_datas = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_plugin_datas]
        if self._db_others is None:
            cp._db_others = []
        else:
            cp._db_others = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_others]

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
                cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]

        # recreate indices and set flags
        cp.db_modules_id_index = dict((v.db_id, v) for v in cp._db_modules)
        cp.db_connections_id_index = dict((v.db_id, v) for v in cp._db_connections)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_plugin_datas_id_index = dict((v.db_id, v) for v in cp._db_plugin_datas)
        cp.db_others_id_index = dict((v.db_id, v) for v in cp._db_others)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema-version change.

        trans_dict maps class names to per-field converter callables; fields
        without a converter are copied verbatim.  Modules are dispatched on
        vtType ('module' / 'abstraction' / 'group').
        """
        if new_obj is None:
            new_obj = DBWorkflow()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'modules' in class_dict:
            res = class_dict['modules'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_module(obj)
        elif hasattr(old_obj, 'db_modules') and old_obj.db_modules is not None:
            for obj in old_obj.db_modules:
                if obj.vtType == 'module':
                    new_obj.db_add_module(DBModule.update_version(obj, trans_dict))
                elif obj.vtType == 'abstraction':
                    new_obj.db_add_module(DBAbstraction.update_version(obj, trans_dict))
                elif obj.vtType == 'group':
                    new_obj.db_add_module(DBGroup.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_modules') and hasattr(new_obj, 'db_deleted_modules'):
            for obj in old_obj.db_deleted_modules:
                if obj.vtType == 'module':
                    n_obj = DBModule.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
                elif obj.vtType == 'abstraction':
                    n_obj = DBAbstraction.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
                elif obj.vtType == 'group':
                    n_obj = DBGroup.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'connections' in class_dict:
            res = class_dict['connections'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_connection(obj)
        elif hasattr(old_obj, 'db_connections') and old_obj.db_connections is not None:
            for obj in old_obj.db_connections:
                new_obj.db_add_connection(DBConnection.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_connections') and hasattr(new_obj, 'db_deleted_connections'):
            for obj in old_obj.db_deleted_connections:
                n_obj = DBConnection.update_version(obj, trans_dict)
                new_obj.db_deleted_connections.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'plugin_datas' in class_dict:
            res = class_dict['plugin_datas'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_plugin_data(obj)
        elif hasattr(old_obj, 'db_plugin_datas') and old_obj.db_plugin_datas is not None:
            for obj in old_obj.db_plugin_datas:
                new_obj.db_add_plugin_data(DBPluginData.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_plugin_datas') and hasattr(new_obj, 'db_deleted_plugin_datas'):
            for obj in old_obj.db_deleted_plugin_datas:
                n_obj = DBPluginData.update_version(obj, trans_dict)
                new_obj.db_deleted_plugin_datas.append(n_obj)
        if 'others' in class_dict:
            res = class_dict['others'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_other(obj)
        elif hasattr(old_obj, 'db_others') and old_obj.db_others is not None:
            for obj in old_obj.db_others:
                new_obj.db_add_other(DBOther.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_others') and hasattr(new_obj, 'db_deleted_others'):
            for obj in old_obj.db_deleted_others:
                n_obj = DBOther.update_version(obj, trans_dict)
                new_obj.db_deleted_others.append(n_obj)
        if 'vistrail_id' in class_dict:
            res = class_dict['vistrail_id'](old_obj, trans_dict)
            new_obj.db_vistrail_id = res
        elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
            new_obj.db_vistrail_id = old_obj.db_vistrail_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (obj, parent_type, parent_id) triples for self and all
        descendants; with orphan=True the children are also detached."""
        children = []
        to_del = []
        for child in self.db_connections:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_connection(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_plugin_datas:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_plugin_data(child)
        to_del = []
        for child in self.db_others:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_other(child)
        to_del = []
        for child in self.db_modules:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_module(child)
        # self comes last so children are processed before their parent
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all pending-delete children; remove=True also clears the lists."""
        children = []
        children.extend(self.db_deleted_connections)
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_plugin_datas)
        children.extend(self.db_deleted_others)
        children.extend(self.db_deleted_modules)
        if remove:
            self.db_deleted_connections = []
            self.db_deleted_annotations = []
            self.db_deleted_plugin_datas = []
            self.db_deleted_others = []
            self.db_deleted_modules = []
        return children
    def has_changes(self):
        """True if self or any child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_connections:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_plugin_datas:
            if child.has_changes():
                return True
        for child in self._db_others:
            if child.has_changes():
                return True
        for child in self._db_modules:
            if child.has_changes():
                return True
        return False

    # --- modules collection accessors (list + id index + deleted list) ---
    def __get_db_modules(self):
        return self._db_modules
    def __set_db_modules(self, modules):
        self._db_modules = modules
        self.is_dirty = True
    db_modules = property(__get_db_modules, __set_db_modules)
    def db_get_modules(self):
        return self._db_modules
    def db_add_module(self, module):
        self.is_dirty = True
        self._db_modules.append(module)
        self.db_modules_id_index[module.db_id] = module
    def db_change_module(self, module):
        # replace in place by id; append if absent
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_modules)):
            if self._db_modules[i].db_id == module.db_id:
                self._db_modules[i] = module
                found = True
                break
        if not found:
            self._db_modules.append(module)
        self.db_modules_id_index[module.db_id] = module
    def db_delete_module(self, module):
        # persisted (non-new) children are remembered for deletion on save
        self.is_dirty = True
        for i in xrange(len(self._db_modules)):
            if self._db_modules[i].db_id == module.db_id:
                if not self._db_modules[i].is_new:
                    self.db_deleted_modules.append(self._db_modules[i])
                del self._db_modules[i]
                break
        del self.db_modules_id_index[module.db_id]
    def db_get_module(self, key):
        for i in xrange(len(self._db_modules)):
            if self._db_modules[i].db_id == key:
                return self._db_modules[i]
        return None
    def db_get_module_by_id(self, key):
        return self.db_modules_id_index[key]
    def db_has_module_with_id(self, key):
        return key in self.db_modules_id_index

    # --- scalar field accessors ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None

    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None

    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None

    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None

    # --- connections collection accessors ---
    def __get_db_connections(self):
        return self._db_connections
    def __set_db_connections(self, connections):
        self._db_connections = connections
        self.is_dirty = True
    db_connections = property(__get_db_connections, __set_db_connections)
    def db_get_connections(self):
        return self._db_connections
    def db_add_connection(self, connection):
        self.is_dirty = True
        self._db_connections.append(connection)
        self.db_connections_id_index[connection.db_id] = connection
    def db_change_connection(self, connection):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_connections)):
            if self._db_connections[i].db_id == connection.db_id:
                self._db_connections[i] = connection
                found = True
                break
        if not found:
            self._db_connections.append(connection)
        self.db_connections_id_index[connection.db_id] = connection
    def db_delete_connection(self, connection):
        self.is_dirty = True
        for i in xrange(len(self._db_connections)):
            if self._db_connections[i].db_id == connection.db_id:
                if not self._db_connections[i].is_new:
                    self.db_deleted_connections.append(self._db_connections[i])
                del self._db_connections[i]
                break
        del self.db_connections_id_index[connection.db_id]
    def db_get_connection(self, key):
        for i in xrange(len(self._db_connections)):
            if self._db_connections[i].db_id == key:
                return self._db_connections[i]
        return None
    def db_get_connection_by_id(self, key):
        return self.db_connections_id_index[key]
    def db_has_connection_with_id(self, key):
        return key in self.db_connections_id_index

    # --- annotations collection accessors ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index

    # --- plugin_datas collection accessors ---
    def __get_db_plugin_datas(self):
        return self._db_plugin_datas
    def __set_db_plugin_datas(self, plugin_datas):
        self._db_plugin_datas = plugin_datas
        self.is_dirty = True
    db_plugin_datas = property(__get_db_plugin_datas, __set_db_plugin_datas)
    def db_get_plugin_datas(self):
        return self._db_plugin_datas
    def db_add_plugin_data(self, plugin_data):
        self.is_dirty = True
        self._db_plugin_datas.append(plugin_data)
        self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
    def db_change_plugin_data(self, plugin_data):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_plugin_datas)):
            if self._db_plugin_datas[i].db_id == plugin_data.db_id:
                self._db_plugin_datas[i] = plugin_data
                found = True
                break
        if not found:
            self._db_plugin_datas.append(plugin_data)
        self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
    def db_delete_plugin_data(self, plugin_data):
        self.is_dirty = True
        for i in xrange(len(self._db_plugin_datas)):
            if self._db_plugin_datas[i].db_id == plugin_data.db_id:
                if not self._db_plugin_datas[i].is_new:
                    self.db_deleted_plugin_datas.append(self._db_plugin_datas[i])
                del self._db_plugin_datas[i]
                break
        del self.db_plugin_datas_id_index[plugin_data.db_id]
    def db_get_plugin_data(self, key):
        for i in xrange(len(self._db_plugin_datas)):
            if self._db_plugin_datas[i].db_id == key:
                return self._db_plugin_datas[i]
        return None
    def db_get_plugin_data_by_id(self, key):
        return self.db_plugin_datas_id_index[key]
    def db_has_plugin_data_with_id(self, key):
        return key in self.db_plugin_datas_id_index

    # --- others collection accessors ---
    def __get_db_others(self):
        return self._db_others
    def __set_db_others(self, others):
        self._db_others = others
        self.is_dirty = True
    db_others = property(__get_db_others, __set_db_others)
    def db_get_others(self):
        return self._db_others
    def db_add_other(self, other):
        self.is_dirty = True
        self._db_others.append(other)
        self.db_others_id_index[other.db_id] = other
    def db_change_other(self, other):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_others)):
            if self._db_others[i].db_id == other.db_id:
                self._db_others[i] = other
                found = True
                break
        if not found:
            self._db_others.append(other)
        self.db_others_id_index[other.db_id] = other
    def db_delete_other(self, other):
        self.is_dirty = True
        for i in xrange(len(self._db_others)):
            if self._db_others[i].db_id == other.db_id:
                if not self._db_others[i].is_new:
                    self.db_deleted_others.append(self._db_others[i])
                del self._db_others[i]
                break
        del self.db_others_id_index[other.db_id]
    def db_get_other(self, key):
        for i in xrange(len(self._db_others)):
            if self._db_others[i].db_id == key:
                return self._db_others[i]
        return None
    def db_get_other_by_id(self, key):
        return self.db_others_id_index[key]
    def db_has_other_with_id(self, key):
        return key in self.db_others_id_index

    def __get_db_vistrail_id(self):
        return self._db_vistrail_id
    def __set_db_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
    db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
    def db_add_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_change_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_delete_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = None

    def getPrimaryKey(self):
        return self._db_id
class DBOpmArtifactIdCause(object):
    """Persisted reference naming an OPM artifact as the cause of an edge.

    A minimal member of the generated DB* family: one ``id`` field plus the
    standard is_dirty/is_new bookkeeping, copy, and version-translation
    helpers.  It has no child objects.
    """

    vtType = 'opm_artifact_id_cause'

    def __init__(self, id=None):
        self._db_id = id
        # a freshly built object is unsaved and must be written on next save
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmArtifactIdCause.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id from id_scope and
        record the old->new pair in id_remap (possibly retargeting the
        referenced artifact id)."""
        duplicate = DBOpmArtifactIdCause(id=self._db_id)

        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = id_scope.remap[self.vtType]
            else:
                remap_key = self.vtType
            id_remap[(remap_key, self.db_id)] = fresh
            duplicate.db_id = fresh
            if hasattr(self, 'db_id') and ('opm_artifact', self._db_id) in id_remap:
                duplicate._db_id = id_remap[('opm_artifact', self._db_id)]
        else:
            # plain copies keep the original's persistence flags
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema change; trans_dict may supply a
        per-field converter keyed by this class's name."""
        if new_obj is None:
            new_obj = DBOpmArtifactIdCause()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in converters:
            new_obj.db_id = converters['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: report only itself under the given parent
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # no child collections, hence never anything pending deletion
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    def _get_db_id(self):
        return self._db_id

    def _set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True

    db_id = property(_get_db_id, _set_db_id)

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None
class DBRefProvEntity(object):
    """Persisted reference to a PROV entity via its ``prov_ref`` id.

    Minimal generated DB* object: a single field, standard dirty/new flags,
    copy and version-translation helpers, and no children.
    """

    vtType = 'ref_prov_entity'

    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        # unsaved until the db layer persists it
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBRefProvEntity.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id and retarget
        prov_ref through id_remap.

        NOTE(review): the new_ids branch reads self.db_id, which this class
        does not define — it looks like the generator emitted the generic
        template here; confirm the new_ids path is never taken for this type.
        """
        duplicate = DBRefProvEntity(prov_ref=self._db_prov_ref)

        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = id_scope.remap[self.vtType]
            else:
                remap_key = self.vtType
            id_remap[(remap_key, self.db_id)] = fresh
            duplicate.db_id = fresh
            if hasattr(self, 'db_prov_ref') and ('prov_entity', self._db_prov_ref) in id_remap:
                duplicate._db_prov_ref = id_remap[('prov_entity', self._db_prov_ref)]
        else:
            # plain copies keep the original's persistence flags
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema change; trans_dict may supply a
        per-field converter keyed by this class's name."""
        if new_obj is None:
            new_obj = DBRefProvEntity()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        if 'prov_ref' in converters:
            new_obj.db_prov_ref = converters['prov_ref'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_ref', None) is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: report only itself under the given parent
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # no child collections, hence never anything pending deletion
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    def _get_db_prov_ref(self):
        return self._db_prov_ref

    def _set_db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True

    db_prov_ref = property(_get_db_prov_ref, _set_db_prov_ref)

    def db_add_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBProvActivity(object):
    """Auto-generated persistence object for a PROV 'activity' record.

    Carries nine scalar fields (id, startTime, endTime and the VisTrails
    vt_* execution metadata) plus a single optional ``is_part_of`` child,
    with a db_deleted_is_part_of list tracking a removed child until it is
    flushed.

    NOTE(review): generated DB* template code; the db layer depends on the
    exact is_dirty/is_new and deleted-child bookkeeping.
    """

    vtType = 'prov_activity'

    def __init__(self, id=None, startTime=None, endTime=None, vt_id=None, vt_type=None, vt_cached=None, vt_completed=None, vt_machine_id=None, vt_error=None, is_part_of=None):
        self._db_id = id
        self._db_startTime = startTime
        self._db_endTime = endTime
        self._db_vt_id = vt_id
        self._db_vt_type = vt_type
        self._db_vt_cached = vt_cached
        self._db_vt_completed = vt_completed
        self._db_vt_machine_id = vt_machine_id
        self._db_vt_error = vt_error
        # single-valued child: deleted list tracks a removed is_part_of
        self.db_deleted_is_part_of = []
        self._db_is_part_of = is_part_of
        # freshly constructed objects start dirty/new
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBProvActivity.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this activity (including is_part_of); with new_ids,
        allocate a fresh id via id_scope and record it in id_remap."""
        cp = DBProvActivity(id=self._db_id,
                            startTime=self._db_startTime,
                            endTime=self._db_endTime,
                            vt_id=self._db_vt_id,
                            vt_type=self._db_vt_type,
                            vt_cached=self._db_vt_cached,
                            vt_completed=self._db_vt_completed,
                            vt_machine_id=self._db_vt_machine_id,
                            vt_error=self._db_vt_error)
        if self._db_is_part_of is not None:
            cp._db_is_part_of = self._db_is_part_of.do_copy(new_ids, id_scope, id_remap)

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id

        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema-version change; trans_dict maps
        class names to per-field converter callables, fields without a
        converter are copied verbatim."""
        if new_obj is None:
            new_obj = DBProvActivity()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'startTime' in class_dict:
            res = class_dict['startTime'](old_obj, trans_dict)
            new_obj.db_startTime = res
        elif hasattr(old_obj, 'db_startTime') and old_obj.db_startTime is not None:
            new_obj.db_startTime = old_obj.db_startTime
        if 'endTime' in class_dict:
            res = class_dict['endTime'](old_obj, trans_dict)
            new_obj.db_endTime = res
        elif hasattr(old_obj, 'db_endTime') and old_obj.db_endTime is not None:
            new_obj.db_endTime = old_obj.db_endTime
        if 'vt_id' in class_dict:
            res = class_dict['vt_id'](old_obj, trans_dict)
            new_obj.db_vt_id = res
        elif hasattr(old_obj, 'db_vt_id') and old_obj.db_vt_id is not None:
            new_obj.db_vt_id = old_obj.db_vt_id
        if 'vt_type' in class_dict:
            res = class_dict['vt_type'](old_obj, trans_dict)
            new_obj.db_vt_type = res
        elif hasattr(old_obj, 'db_vt_type') and old_obj.db_vt_type is not None:
            new_obj.db_vt_type = old_obj.db_vt_type
        if 'vt_cached' in class_dict:
            res = class_dict['vt_cached'](old_obj, trans_dict)
            new_obj.db_vt_cached = res
        elif hasattr(old_obj, 'db_vt_cached') and old_obj.db_vt_cached is not None:
            new_obj.db_vt_cached = old_obj.db_vt_cached
        if 'vt_completed' in class_dict:
            res = class_dict['vt_completed'](old_obj, trans_dict)
            new_obj.db_vt_completed = res
        elif hasattr(old_obj, 'db_vt_completed') and old_obj.db_vt_completed is not None:
            new_obj.db_vt_completed = old_obj.db_vt_completed
        if 'vt_machine_id' in class_dict:
            res = class_dict['vt_machine_id'](old_obj, trans_dict)
            new_obj.db_vt_machine_id = res
        elif hasattr(old_obj, 'db_vt_machine_id') and old_obj.db_vt_machine_id is not None:
            new_obj.db_vt_machine_id = old_obj.db_vt_machine_id
        if 'vt_error' in class_dict:
            res = class_dict['vt_error'](old_obj, trans_dict)
            new_obj.db_vt_error = res
        elif hasattr(old_obj, 'db_vt_error') and old_obj.db_vt_error is not None:
            new_obj.db_vt_error = old_obj.db_vt_error
        if 'is_part_of' in class_dict:
            res = class_dict['is_part_of'](old_obj, trans_dict)
            new_obj.db_is_part_of = res
        elif hasattr(old_obj, 'db_is_part_of') and old_obj.db_is_part_of is not None:
            obj = old_obj.db_is_part_of
            new_obj.db_add_is_part_of(DBIsPartOf.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_is_part_of') and hasattr(new_obj, 'db_deleted_is_part_of'):
            for obj in old_obj.db_deleted_is_part_of:
                n_obj = DBIsPartOf.update_version(obj, trans_dict)
                new_obj.db_deleted_is_part_of.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (obj, parent_type, parent_id) triples for the is_part_of
        child (if any) and self; orphan=True also detaches the child."""
        children = []
        if self._db_is_part_of is not None:
            children.extend(self._db_is_part_of.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_is_part_of = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return pending-delete children; remove=True also clears the list."""
        children = []
        children.extend(self.db_deleted_is_part_of)
        if remove:
            self.db_deleted_is_part_of = []
        return children
    def has_changes(self):
        """True if self or the is_part_of child is dirty."""
        if self.is_dirty:
            return True
        if self._db_is_part_of is not None and self._db_is_part_of.has_changes():
            return True
        return False

    # --- scalar field accessors ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_startTime(self):
        return self._db_startTime
    def __set_db_startTime(self, startTime):
        self._db_startTime = startTime
        self.is_dirty = True
    db_startTime = property(__get_db_startTime, __set_db_startTime)
    def db_add_startTime(self, startTime):
        self._db_startTime = startTime
    def db_change_startTime(self, startTime):
        self._db_startTime = startTime
    def db_delete_startTime(self, startTime):
        self._db_startTime = None

    def __get_db_endTime(self):
        return self._db_endTime
    def __set_db_endTime(self, endTime):
        self._db_endTime = endTime
        self.is_dirty = True
    db_endTime = property(__get_db_endTime, __set_db_endTime)
    def db_add_endTime(self, endTime):
        self._db_endTime = endTime
    def db_change_endTime(self, endTime):
        self._db_endTime = endTime
    def db_delete_endTime(self, endTime):
        self._db_endTime = None

    def __get_db_vt_id(self):
        return self._db_vt_id
    def __set_db_vt_id(self, vt_id):
        self._db_vt_id = vt_id
        self.is_dirty = True
    db_vt_id = property(__get_db_vt_id, __set_db_vt_id)
    def db_add_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_change_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_delete_vt_id(self, vt_id):
        self._db_vt_id = None

    def __get_db_vt_type(self):
        return self._db_vt_type
    def __set_db_vt_type(self, vt_type):
        self._db_vt_type = vt_type
        self.is_dirty = True
    db_vt_type = property(__get_db_vt_type, __set_db_vt_type)
    def db_add_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_change_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_delete_vt_type(self, vt_type):
        self._db_vt_type = None

    def __get_db_vt_cached(self):
        return self._db_vt_cached
    def __set_db_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
        self.is_dirty = True
    db_vt_cached = property(__get_db_vt_cached, __set_db_vt_cached)
    def db_add_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
    def db_change_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
    def db_delete_vt_cached(self, vt_cached):
        self._db_vt_cached = None

    def __get_db_vt_completed(self):
        return self._db_vt_completed
    def __set_db_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
        self.is_dirty = True
    db_vt_completed = property(__get_db_vt_completed, __set_db_vt_completed)
    def db_add_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
    def db_change_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
    def db_delete_vt_completed(self, vt_completed):
        self._db_vt_completed = None

    def __get_db_vt_machine_id(self):
        return self._db_vt_machine_id
    def __set_db_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
        self.is_dirty = True
    db_vt_machine_id = property(__get_db_vt_machine_id, __set_db_vt_machine_id)
    def db_add_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
    def db_change_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
    def db_delete_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = None

    def __get_db_vt_error(self):
        return self._db_vt_error
    def __set_db_vt_error(self, vt_error):
        self._db_vt_error = vt_error
        self.is_dirty = True
    db_vt_error = property(__get_db_vt_error, __set_db_vt_error)
    def db_add_vt_error(self, vt_error):
        self._db_vt_error = vt_error
    def db_change_vt_error(self, vt_error):
        self._db_vt_error = vt_error
    def db_delete_vt_error(self, vt_error):
        self._db_vt_error = None

    # --- is_part_of child accessors (single-valued) ---
    def __get_db_is_part_of(self):
        return self._db_is_part_of
    def __set_db_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
        self.is_dirty = True
    db_is_part_of = property(__get_db_is_part_of, __set_db_is_part_of)
    def db_add_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_change_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_delete_is_part_of(self, is_part_of):
        # remember a persisted child for deletion on save
        if not self.is_new:
            self.db_deleted_is_part_of.append(self._db_is_part_of)
        self._db_is_part_of = None

    def getPrimaryKey(self):
        return self._db_id
id_scope=None, id_remap=None): + cp = DBMashupAction(id=self._db_id, + prevId=self._db_prevId, + date=self._db_date, + user=self._db_user) + if self._db_mashup is not None: + cp._db_mashup = self._db_mashup.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_prevId') and ('mashup_action', self._db_prevId) in id_remap: + cp._db_prevId = id_remap[('mashup_action', self._db_prevId)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBMashupAction() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'prevId' in class_dict: + res = class_dict['prevId'](old_obj, trans_dict) + new_obj.db_prevId = res + elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None: + new_obj.db_prevId = old_obj.db_prevId + if 'date' in class_dict: + res = class_dict['date'](old_obj, trans_dict) + new_obj.db_date = res + elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None: + new_obj.db_date = old_obj.db_date + if 'user' in class_dict: + res = class_dict['user'](old_obj, trans_dict) + new_obj.db_user = res + elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None: + new_obj.db_user = old_obj.db_user + if 'mashup' in class_dict: + res = class_dict['mashup'](old_obj, trans_dict) + new_obj.db_mashup = res + elif hasattr(old_obj, 'db_mashup') and old_obj.db_mashup is not None: + 
obj = old_obj.db_mashup + new_obj.db_add_mashup(DBMashup.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_mashup') and hasattr(new_obj, 'db_deleted_mashup'): + for obj in old_obj.db_deleted_mashup: + n_obj = DBMashup.update_version(obj, trans_dict) + new_obj.db_deleted_mashup.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_mashup is not None: + children.extend(self._db_mashup.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_mashup = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_mashup) + if remove: + self.db_deleted_mashup = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_mashup is not None and self._db_mashup.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_prevId(self): + return self._db_prevId + def __set_db_prevId(self, prevId): + self._db_prevId = prevId + self.is_dirty = True + db_prevId = property(__get_db_prevId, __set_db_prevId) + def db_add_prevId(self, prevId): + self._db_prevId = prevId + def db_change_prevId(self, prevId): + self._db_prevId = prevId + def db_delete_prevId(self, prevId): + self._db_prevId = None + + def __get_db_date(self): + return self._db_date + def __set_db_date(self, date): + self._db_date = date + self.is_dirty = True + db_date = property(__get_db_date, __set_db_date) + def db_add_date(self, date): + self._db_date = date + def 
db_change_date(self, date): + self._db_date = date + def db_delete_date(self, date): + self._db_date = None + + def __get_db_user(self): + return self._db_user + def __set_db_user(self, user): + self._db_user = user + self.is_dirty = True + db_user = property(__get_db_user, __set_db_user) + def db_add_user(self, user): + self._db_user = user + def db_change_user(self, user): + self._db_user = user + def db_delete_user(self, user): + self._db_user = None + + def __get_db_mashup(self): + return self._db_mashup + def __set_db_mashup(self, mashup): + self._db_mashup = mashup + self.is_dirty = True + db_mashup = property(__get_db_mashup, __set_db_mashup) + def db_add_mashup(self, mashup): + self._db_mashup = mashup + def db_change_mashup(self, mashup): + self._db_mashup = mashup + def db_delete_mashup(self, mashup): + if not self.is_new: + self.db_deleted_mashup.append(self._db_mashup) + self._db_mashup = None + + def getPrimaryKey(self): + return self._db_id + +class DBProvUsage(object): + + vtType = 'prov_usage' + + def __init__(self, prov_activity=None, prov_entity=None, prov_role=None): + self.db_deleted_prov_activity = [] + self._db_prov_activity = prov_activity + self.db_deleted_prov_entity = [] + self._db_prov_entity = prov_entity + self._db_prov_role = prov_role + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBProvUsage.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBProvUsage(prov_role=self._db_prov_role) + if self._db_prov_activity is not None: + cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap) + if self._db_prov_entity is not None: + cp._db_prov_entity = self._db_prov_entity.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id 
+ + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBProvUsage() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_activity' in class_dict: + res = class_dict['prov_activity'](old_obj, trans_dict) + new_obj.db_prov_activity = res + elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None: + obj = old_obj.db_prov_activity + new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'): + for obj in old_obj.db_deleted_prov_activity: + n_obj = DBRefProvActivity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_activity.append(n_obj) + if 'prov_entity' in class_dict: + res = class_dict['prov_entity'](old_obj, trans_dict) + new_obj.db_prov_entity = res + elif hasattr(old_obj, 'db_prov_entity') and old_obj.db_prov_entity is not None: + obj = old_obj.db_prov_entity + new_obj.db_add_prov_entity(DBRefProvEntity.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_entity') and hasattr(new_obj, 'db_deleted_prov_entity'): + for obj in old_obj.db_deleted_prov_entity: + n_obj = DBRefProvEntity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_entity.append(n_obj) + if 'prov_role' in class_dict: + res = class_dict['prov_role'](old_obj, trans_dict) + new_obj.db_prov_role = res + elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None: + new_obj.db_prov_role = old_obj.db_prov_role + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_prov_activity is not None: + 
children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_activity = None + if self._db_prov_entity is not None: + children.extend(self._db_prov_entity.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_entity = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_prov_activity) + children.extend(self.db_deleted_prov_entity) + if remove: + self.db_deleted_prov_activity = [] + self.db_deleted_prov_entity = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_prov_activity is not None and self._db_prov_activity.has_changes(): + return True + if self._db_prov_entity is not None and self._db_prov_entity.has_changes(): + return True + return False + def __get_db_prov_activity(self): + return self._db_prov_activity + def __set_db_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + self.is_dirty = True + db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity) + def db_add_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + def db_change_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + def db_delete_prov_activity(self, prov_activity): + if not self.is_new: + self.db_deleted_prov_activity.append(self._db_prov_activity) + self._db_prov_activity = None + + def __get_db_prov_entity(self): + return self._db_prov_entity + def __set_db_prov_entity(self, prov_entity): + self._db_prov_entity = prov_entity + self.is_dirty = True + db_prov_entity = property(__get_db_prov_entity, __set_db_prov_entity) + def db_add_prov_entity(self, prov_entity): + self._db_prov_entity = prov_entity + def db_change_prov_entity(self, prov_entity): + self._db_prov_entity = prov_entity + def db_delete_prov_entity(self, prov_entity): + if not 
self.is_new: + self.db_deleted_prov_entity.append(self._db_prov_entity) + self._db_prov_entity = None + + def __get_db_prov_role(self): + return self._db_prov_role + def __set_db_prov_role(self, prov_role): + self._db_prov_role = prov_role + self.is_dirty = True + db_prov_role = property(__get_db_prov_role, __set_db_prov_role) + def db_add_prov_role(self, prov_role): + self._db_prov_role = prov_role + def db_change_prov_role(self, prov_role): + self._db_prov_role = prov_role + def db_delete_prov_role(self, prov_role): + self._db_prov_role = None + + + +class DBOpmArtifactValue(object): + + vtType = 'opm_artifact_value' + + def __init__(self, value=None): + self.db_deleted_value = [] + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmArtifactValue.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmArtifactValue() + if self._db_value is not None: + cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmArtifactValue() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + obj = old_obj.db_value + if obj.vtType == 'portSpec': + new_obj.db_add_value(DBPortSpec.update_version(obj, trans_dict)) + elif obj.vtType == 'function': + 
new_obj.db_add_value(DBFunction.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'): + for obj in old_obj.db_deleted_value: + if obj.vtType == 'portSpec': + n_obj = DBPortSpec.update_version(obj, trans_dict) + new_obj.db_deleted_value.append(n_obj) + elif obj.vtType == 'function': + n_obj = DBFunction.update_version(obj, trans_dict) + new_obj.db_deleted_value.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_value is not None: + children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_value = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_value) + if remove: + self.db_deleted_value = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_value is not None and self._db_value.has_changes(): + return True + return False + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + if not self.is_new: + self.db_deleted_value.append(self._db_value) + self._db_value = None + + + +class DBOpmArtifactIdEffect(object): + + vtType = 'opm_artifact_id_effect' + + def __init__(self, id=None): + self._db_id = id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmArtifactIdEffect.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmArtifactIdEffect(id=self._db_id) + + # set new ids + if new_ids: 
+ new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_id') and ('opm_artifact', self._db_id) in id_remap: + cp._db_id = id_remap[('opm_artifact', self._db_id)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmArtifactIdEffect() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + + +class DBOpmGraph(object): + + vtType = 'opm_graph' + + def __init__(self, accounts=None, processes=None, artifacts=None, agents=None, dependencies=None): + self.db_deleted_accounts = [] + self._db_accounts = accounts + self.db_deleted_processes = [] + self._db_processes = processes + self.db_deleted_artifacts = [] + self._db_artifacts = artifacts + self.db_deleted_agents = [] + self._db_agents = agents + 
self.db_deleted_dependencies = [] + self._db_dependencies = dependencies + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmGraph.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmGraph() + if self._db_accounts is not None: + cp._db_accounts = self._db_accounts.do_copy(new_ids, id_scope, id_remap) + if self._db_processes is not None: + cp._db_processes = self._db_processes.do_copy(new_ids, id_scope, id_remap) + if self._db_artifacts is not None: + cp._db_artifacts = self._db_artifacts.do_copy(new_ids, id_scope, id_remap) + if self._db_agents is not None: + cp._db_agents = self._db_agents.do_copy(new_ids, id_scope, id_remap) + if self._db_dependencies is not None: + cp._db_dependencies = self._db_dependencies.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmGraph() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + new_obj.db_accounts = res + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + obj = old_obj.db_accounts + new_obj.db_add_accounts(DBOpmAccounts.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccounts.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'processes' in class_dict: + res 
= class_dict['processes'](old_obj, trans_dict) + new_obj.db_processes = res + elif hasattr(old_obj, 'db_processes') and old_obj.db_processes is not None: + obj = old_obj.db_processes + new_obj.db_add_processes(DBOpmProcesses.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_processes') and hasattr(new_obj, 'db_deleted_processes'): + for obj in old_obj.db_deleted_processes: + n_obj = DBOpmProcesses.update_version(obj, trans_dict) + new_obj.db_deleted_processes.append(n_obj) + if 'artifacts' in class_dict: + res = class_dict['artifacts'](old_obj, trans_dict) + new_obj.db_artifacts = res + elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None: + obj = old_obj.db_artifacts + new_obj.db_add_artifacts(DBOpmArtifacts.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'): + for obj in old_obj.db_deleted_artifacts: + n_obj = DBOpmArtifacts.update_version(obj, trans_dict) + new_obj.db_deleted_artifacts.append(n_obj) + if 'agents' in class_dict: + res = class_dict['agents'](old_obj, trans_dict) + new_obj.db_agents = res + elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None: + obj = old_obj.db_agents + new_obj.db_add_agents(DBOpmAgents.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'): + for obj in old_obj.db_deleted_agents: + n_obj = DBOpmAgents.update_version(obj, trans_dict) + new_obj.db_deleted_agents.append(n_obj) + if 'dependencies' in class_dict: + res = class_dict['dependencies'](old_obj, trans_dict) + new_obj.db_dependencies = res + elif hasattr(old_obj, 'db_dependencies') and old_obj.db_dependencies is not None: + obj = old_obj.db_dependencies + new_obj.db_add_dependencies(DBOpmDependencies.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_dependencies') and hasattr(new_obj, 'db_deleted_dependencies'): + for obj in old_obj.db_deleted_dependencies: + n_obj = 
DBOpmDependencies.update_version(obj, trans_dict) + new_obj.db_deleted_dependencies.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_accounts is not None: + children.extend(self._db_accounts.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_accounts = None + if self._db_processes is not None: + children.extend(self._db_processes.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_processes = None + if self._db_artifacts is not None: + children.extend(self._db_artifacts.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_artifacts = None + if self._db_agents is not None: + children.extend(self._db_agents.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_agents = None + if self._db_dependencies is not None: + children.extend(self._db_dependencies.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_dependencies = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_processes) + children.extend(self.db_deleted_artifacts) + children.extend(self.db_deleted_agents) + children.extend(self.db_deleted_dependencies) + if remove: + self.db_deleted_accounts = [] + self.db_deleted_processes = [] + self.db_deleted_artifacts = [] + self.db_deleted_agents = [] + self.db_deleted_dependencies = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_accounts is not None and self._db_accounts.has_changes(): + return True + if self._db_processes is not None and self._db_processes.has_changes(): + return True + if self._db_artifacts is not None and 
self._db_artifacts.has_changes(): + return True + if self._db_agents is not None and self._db_agents.has_changes(): + return True + if self._db_dependencies is not None and self._db_dependencies.has_changes(): + return True + return False + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_add_accounts(self, accounts): + self._db_accounts = accounts + def db_change_accounts(self, accounts): + self._db_accounts = accounts + def db_delete_accounts(self, accounts): + if not self.is_new: + self.db_deleted_accounts.append(self._db_accounts) + self._db_accounts = None + + def __get_db_processes(self): + return self._db_processes + def __set_db_processes(self, processes): + self._db_processes = processes + self.is_dirty = True + db_processes = property(__get_db_processes, __set_db_processes) + def db_add_processes(self, processes): + self._db_processes = processes + def db_change_processes(self, processes): + self._db_processes = processes + def db_delete_processes(self, processes): + if not self.is_new: + self.db_deleted_processes.append(self._db_processes) + self._db_processes = None + + def __get_db_artifacts(self): + return self._db_artifacts + def __set_db_artifacts(self, artifacts): + self._db_artifacts = artifacts + self.is_dirty = True + db_artifacts = property(__get_db_artifacts, __set_db_artifacts) + def db_add_artifacts(self, artifacts): + self._db_artifacts = artifacts + def db_change_artifacts(self, artifacts): + self._db_artifacts = artifacts + def db_delete_artifacts(self, artifacts): + if not self.is_new: + self.db_deleted_artifacts.append(self._db_artifacts) + self._db_artifacts = None + + def __get_db_agents(self): + return self._db_agents + def __set_db_agents(self, agents): + self._db_agents = agents + self.is_dirty = True + db_agents = property(__get_db_agents, __set_db_agents) + 
def db_add_agents(self, agents): + self._db_agents = agents + def db_change_agents(self, agents): + self._db_agents = agents + def db_delete_agents(self, agents): + if not self.is_new: + self.db_deleted_agents.append(self._db_agents) + self._db_agents = None + + def __get_db_dependencies(self): + return self._db_dependencies + def __set_db_dependencies(self, dependencies): + self._db_dependencies = dependencies + self.is_dirty = True + db_dependencies = property(__get_db_dependencies, __set_db_dependencies) + def db_add_dependencies(self, dependencies): + self._db_dependencies = dependencies + def db_change_dependencies(self, dependencies): + self._db_dependencies = dependencies + def db_delete_dependencies(self, dependencies): + if not self.is_new: + self.db_deleted_dependencies.append(self._db_dependencies) + self._db_dependencies = None + + + +class DBMashuptrail(object): + + vtType = 'mashuptrail' + + def __init__(self, id=None, name=None, version=None, vtVersion=None, last_modified=None, actions=None, annotations=None, actionAnnotations=None): + self._db_id = id + self._db_name = name + self._db_version = version + self._db_vtVersion = vtVersion + self._db_last_modified = last_modified + self.db_deleted_actions = [] + self.db_actions_id_index = {} + if actions is None: + self._db_actions = [] + else: + self._db_actions = actions + for v in self._db_actions: + self.db_actions_id_index[v.db_id] = v + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + self.db_annotations_key_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_annotations_key_index[v.db_key] = v + self.db_deleted_actionAnnotations = [] + self.db_actionAnnotations_id_index = {} + self.db_actionAnnotations_action_id_index = {} + self.db_actionAnnotations_key_index = {} + if actionAnnotations is None: + self._db_actionAnnotations = [] + 
else: + self._db_actionAnnotations = actionAnnotations + for v in self._db_actionAnnotations: + self.db_actionAnnotations_id_index[v.db_id] = v + self.db_actionAnnotations_action_id_index[(v.db_action_id,v.db_key)] = v + self.db_actionAnnotations_key_index[(v.db_key,v.db_value)] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBMashuptrail.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBMashuptrail(id=self._db_id, + name=self._db_name, + version=self._db_version, + vtVersion=self._db_vtVersion, + last_modified=self._db_last_modified) + if self._db_actions is None: + cp._db_actions = [] + else: + cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + if self._db_actionAnnotations is None: + cp._db_actionAnnotations = [] + else: + cp._db_actionAnnotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actionAnnotations] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations) + cp.db_actionAnnotations_id_index = dict((v.db_id, v) for v in cp._db_actionAnnotations) + cp.db_actionAnnotations_action_id_index = dict(((v.db_action_id,v.db_key), v) for v in cp._db_actionAnnotations) + cp.db_actionAnnotations_key_index = dict(((v.db_key,v.db_value), v) for v in cp._db_actionAnnotations) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = 
self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBMashuptrail() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'vtVersion' in class_dict: + res = class_dict['vtVersion'](old_obj, trans_dict) + new_obj.db_vtVersion = res + elif hasattr(old_obj, 'db_vtVersion') and old_obj.db_vtVersion is not None: + new_obj.db_vtVersion = old_obj.db_vtVersion + if 'last_modified' in class_dict: + res = class_dict['last_modified'](old_obj, trans_dict) + new_obj.db_last_modified = res + elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None: + new_obj.db_last_modified = old_obj.db_last_modified + if 'actions' in class_dict: + res = class_dict['actions'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_action(obj) + elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None: + for obj in old_obj.db_actions: + new_obj.db_add_action(DBMashupAction.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'): + for obj in old_obj.db_deleted_actions: + n_obj = DBMashupAction.update_version(obj, trans_dict) + new_obj.db_deleted_actions.append(n_obj) + if 'annotations' in class_dict: + res = 
class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + if 'actionAnnotations' in class_dict: + res = class_dict['actionAnnotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_actionAnnotation(obj) + elif hasattr(old_obj, 'db_actionAnnotations') and old_obj.db_actionAnnotations is not None: + for obj in old_obj.db_actionAnnotations: + new_obj.db_add_actionAnnotation(DBMashupActionAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_actionAnnotations') and hasattr(new_obj, 'db_deleted_actionAnnotations'): + for obj in old_obj.db_deleted_actionAnnotations: + n_obj = DBMashupActionAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_actionAnnotations.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_actions: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_action(child) + to_del = [] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_actionAnnotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + 
to_del.append(child) + for child in to_del: + self.db_delete_actionAnnotation(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_actions) + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_actionAnnotations) + if remove: + self.db_deleted_actions = [] + self.db_deleted_annotations = [] + self.db_deleted_actionAnnotations = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_actions: + if child.has_changes(): + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_actionAnnotations: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_vtVersion(self): + return self._db_vtVersion + def __set_db_vtVersion(self, vtVersion): + self._db_vtVersion = 
vtVersion + self.is_dirty = True + db_vtVersion = property(__get_db_vtVersion, __set_db_vtVersion) + def db_add_vtVersion(self, vtVersion): + self._db_vtVersion = vtVersion + def db_change_vtVersion(self, vtVersion): + self._db_vtVersion = vtVersion + def db_delete_vtVersion(self, vtVersion): + self._db_vtVersion = None + + def __get_db_last_modified(self): + return self._db_last_modified + def __set_db_last_modified(self, last_modified): + self._db_last_modified = last_modified + self.is_dirty = True + db_last_modified = property(__get_db_last_modified, __set_db_last_modified) + def db_add_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_change_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_delete_last_modified(self, last_modified): + self._db_last_modified = None + + def __get_db_actions(self): + return self._db_actions + def __set_db_actions(self, actions): + self._db_actions = actions + self.is_dirty = True + db_actions = property(__get_db_actions, __set_db_actions) + def db_get_actions(self): + return self._db_actions + def db_add_action(self, action): + self.is_dirty = True + self._db_actions.append(action) + self.db_actions_id_index[action.db_id] = action + def db_change_action(self, action): + self.is_dirty = True + found = False + for i in xrange(len(self._db_actions)): + if self._db_actions[i].db_id == action.db_id: + self._db_actions[i] = action + found = True + break + if not found: + self._db_actions.append(action) + self.db_actions_id_index[action.db_id] = action + def db_delete_action(self, action): + self.is_dirty = True + for i in xrange(len(self._db_actions)): + if self._db_actions[i].db_id == action.db_id: + if not self._db_actions[i].is_new: + self.db_deleted_actions.append(self._db_actions[i]) + del self._db_actions[i] + break + del self.db_actions_id_index[action.db_id] + def db_get_action(self, key): + for i in xrange(len(self._db_actions)): + if 
self._db_actions[i].db_id == key: + return self._db_actions[i] + return None + def db_get_action_by_id(self, key): + return self.db_actions_id_index[key] + def db_has_action_with_id(self, key): + return key in self.db_actions_id_index + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + del self.db_annotations_key_index[annotation.db_key] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + def 
db_get_annotation_by_key(self, key): + return self.db_annotations_key_index[key] + def db_has_annotation_with_key(self, key): + return key in self.db_annotations_key_index + + def __get_db_actionAnnotations(self): + return self._db_actionAnnotations + def __set_db_actionAnnotations(self, actionAnnotations): + self._db_actionAnnotations = actionAnnotations + self.is_dirty = True + db_actionAnnotations = property(__get_db_actionAnnotations, __set_db_actionAnnotations) + def db_get_actionAnnotations(self): + return self._db_actionAnnotations + def db_add_actionAnnotation(self, actionAnnotation): + self.is_dirty = True + self._db_actionAnnotations.append(actionAnnotation) + self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation + self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation + self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation + def db_change_actionAnnotation(self, actionAnnotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_actionAnnotations)): + if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id: + self._db_actionAnnotations[i] = actionAnnotation + found = True + break + if not found: + self._db_actionAnnotations.append(actionAnnotation) + self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation + self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation + self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation + def db_delete_actionAnnotation(self, actionAnnotation): + self.is_dirty = True + for i in xrange(len(self._db_actionAnnotations)): + if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id: + if not self._db_actionAnnotations[i].is_new: + self.db_deleted_actionAnnotations.append(self._db_actionAnnotations[i]) + del 
self._db_actionAnnotations[i] + break + del self.db_actionAnnotations_id_index[actionAnnotation.db_id] + del self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] + try: + del self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] + except KeyError: + pass + def db_get_actionAnnotation(self, key): + for i in xrange(len(self._db_actionAnnotations)): + if self._db_actionAnnotations[i].db_id == key: + return self._db_actionAnnotations[i] + return None + def db_get_actionAnnotation_by_id(self, key): + return self.db_actionAnnotations_id_index[key] + def db_has_actionAnnotation_with_id(self, key): + return key in self.db_actionAnnotations_id_index + def db_get_actionAnnotation_by_action_id(self, key): + return self.db_actionAnnotations_action_id_index[key] + def db_has_actionAnnotation_with_action_id(self, key): + return key in self.db_actionAnnotations_action_id_index + def db_get_actionAnnotation_by_key(self, key): + return self.db_actionAnnotations_key_index[key] + def db_has_actionAnnotation_with_key(self, key): + return key in self.db_actionAnnotations_key_index + + def getPrimaryKey(self): + return self._db_id + +class DBRegistry(object): + + vtType = 'registry' + + def __init__(self, id=None, entity_type=None, version=None, root_descriptor_id=None, name=None, last_modified=None, packages=None): + self._db_id = id + self._db_entity_type = entity_type + self._db_version = version + self._db_root_descriptor_id = root_descriptor_id + self._db_name = name + self._db_last_modified = last_modified + self.db_deleted_packages = [] + self.db_packages_id_index = {} + self.db_packages_identifier_index = {} + if packages is None: + self._db_packages = [] + else: + self._db_packages = packages + for v in self._db_packages: + self.db_packages_id_index[v.db_id] = v + self.db_packages_identifier_index[(v.db_identifier,v.db_version)] = v + self.is_dirty = True + self.is_new = True + + def 
__copy__(self): + return DBRegistry.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBRegistry(id=self._db_id, + entity_type=self._db_entity_type, + version=self._db_version, + root_descriptor_id=self._db_root_descriptor_id, + name=self._db_name, + last_modified=self._db_last_modified) + if self._db_packages is None: + cp._db_packages = [] + else: + cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_root_descriptor_id') and ('module_descriptor', self._db_root_descriptor_id) in id_remap: + cp._db_root_descriptor_id = id_remap[('module_descriptor', self._db_root_descriptor_id)] + + # recreate indices and set flags + cp.db_packages_id_index = dict((v.db_id, v) for v in cp._db_packages) + cp.db_packages_identifier_index = dict(((v.db_identifier,v.db_version), v) for v in cp._db_packages) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBRegistry() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'entity_type' in class_dict: + res = class_dict['entity_type'](old_obj, trans_dict) + new_obj.db_entity_type = res + elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None: + new_obj.db_entity_type = old_obj.db_entity_type + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + 
new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'root_descriptor_id' in class_dict: + res = class_dict['root_descriptor_id'](old_obj, trans_dict) + new_obj.db_root_descriptor_id = res + elif hasattr(old_obj, 'db_root_descriptor_id') and old_obj.db_root_descriptor_id is not None: + new_obj.db_root_descriptor_id = old_obj.db_root_descriptor_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'last_modified' in class_dict: + res = class_dict['last_modified'](old_obj, trans_dict) + new_obj.db_last_modified = res + elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None: + new_obj.db_last_modified = old_obj.db_last_modified + if 'packages' in class_dict: + res = class_dict['packages'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_package(obj) + elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None: + for obj in old_obj.db_packages: + new_obj.db_add_package(DBPackage.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'): + for obj in old_obj.db_deleted_packages: + n_obj = DBPackage.update_version(obj, trans_dict) + new_obj.db_deleted_packages.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_packages: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_package(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + 
children.extend(self.db_deleted_packages) + if remove: + self.db_deleted_packages = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_packages: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_entity_type(self): + return self._db_entity_type + def __set_db_entity_type(self, entity_type): + self._db_entity_type = entity_type + self.is_dirty = True + db_entity_type = property(__get_db_entity_type, __set_db_entity_type) + def db_add_entity_type(self, entity_type): + self._db_entity_type = entity_type + def db_change_entity_type(self, entity_type): + self._db_entity_type = entity_type + def db_delete_entity_type(self, entity_type): + self._db_entity_type = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_root_descriptor_id(self): + return self._db_root_descriptor_id + def __set_db_root_descriptor_id(self, root_descriptor_id): + self._db_root_descriptor_id = root_descriptor_id + self.is_dirty = True + db_root_descriptor_id = property(__get_db_root_descriptor_id, __set_db_root_descriptor_id) + def db_add_root_descriptor_id(self, root_descriptor_id): + self._db_root_descriptor_id = root_descriptor_id + def db_change_root_descriptor_id(self, root_descriptor_id): + self._db_root_descriptor_id = root_descriptor_id + def 
db_delete_root_descriptor_id(self, root_descriptor_id): + self._db_root_descriptor_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_last_modified(self): + return self._db_last_modified + def __set_db_last_modified(self, last_modified): + self._db_last_modified = last_modified + self.is_dirty = True + db_last_modified = property(__get_db_last_modified, __set_db_last_modified) + def db_add_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_change_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_delete_last_modified(self, last_modified): + self._db_last_modified = None + + def __get_db_packages(self): + return self._db_packages + def __set_db_packages(self, packages): + self._db_packages = packages + self.is_dirty = True + db_packages = property(__get_db_packages, __set_db_packages) + def db_get_packages(self): + return self._db_packages + def db_add_package(self, package): + self.is_dirty = True + self._db_packages.append(package) + self.db_packages_id_index[package.db_id] = package + self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package + def db_change_package(self, package): + self.is_dirty = True + found = False + for i in xrange(len(self._db_packages)): + if self._db_packages[i].db_id == package.db_id: + self._db_packages[i] = package + found = True + break + if not found: + self._db_packages.append(package) + self.db_packages_id_index[package.db_id] = package + self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package + def db_delete_package(self, package): + self.is_dirty = True + for i in 
xrange(len(self._db_packages)): + if self._db_packages[i].db_id == package.db_id: + if not self._db_packages[i].is_new: + self.db_deleted_packages.append(self._db_packages[i]) + del self._db_packages[i] + break + del self.db_packages_id_index[package.db_id] + del self.db_packages_identifier_index[(package.db_identifier,package.db_version)] + def db_get_package(self, key): + for i in xrange(len(self._db_packages)): + if self._db_packages[i].db_id == key: + return self._db_packages[i] + return None + def db_get_package_by_id(self, key): + return self.db_packages_id_index[key] + def db_has_package_with_id(self, key): + return key in self.db_packages_id_index + def db_get_package_by_identifier(self, key): + return self.db_packages_identifier_index[key] + def db_has_package_with_identifier(self, key): + return key in self.db_packages_identifier_index + + def getPrimaryKey(self): + return self._db_id + +class DBVtConnection(object): + + vtType = 'vt_connection' + + def __init__(self, id=None, vt_source=None, vt_dest=None, vt_source_port=None, vt_dest_port=None, vt_source_signature=None, vt_dest_signature=None): + self._db_id = id + self._db_vt_source = vt_source + self._db_vt_dest = vt_dest + self._db_vt_source_port = vt_source_port + self._db_vt_dest_port = vt_dest_port + self._db_vt_source_signature = vt_source_signature + self._db_vt_dest_signature = vt_dest_signature + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBVtConnection.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBVtConnection(id=self._db_id, + vt_source=self._db_vt_source, + vt_dest=self._db_vt_dest, + vt_source_port=self._db_vt_source_port, + vt_dest_port=self._db_vt_dest_port, + vt_source_signature=self._db_vt_source_signature, + vt_dest_signature=self._db_vt_dest_signature) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], 
self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBVtConnection() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'vt_source' in class_dict: + res = class_dict['vt_source'](old_obj, trans_dict) + new_obj.db_vt_source = res + elif hasattr(old_obj, 'db_vt_source') and old_obj.db_vt_source is not None: + new_obj.db_vt_source = old_obj.db_vt_source + if 'vt_dest' in class_dict: + res = class_dict['vt_dest'](old_obj, trans_dict) + new_obj.db_vt_dest = res + elif hasattr(old_obj, 'db_vt_dest') and old_obj.db_vt_dest is not None: + new_obj.db_vt_dest = old_obj.db_vt_dest + if 'vt_source_port' in class_dict: + res = class_dict['vt_source_port'](old_obj, trans_dict) + new_obj.db_vt_source_port = res + elif hasattr(old_obj, 'db_vt_source_port') and old_obj.db_vt_source_port is not None: + new_obj.db_vt_source_port = old_obj.db_vt_source_port + if 'vt_dest_port' in class_dict: + res = class_dict['vt_dest_port'](old_obj, trans_dict) + new_obj.db_vt_dest_port = res + elif hasattr(old_obj, 'db_vt_dest_port') and old_obj.db_vt_dest_port is not None: + new_obj.db_vt_dest_port = old_obj.db_vt_dest_port + if 'vt_source_signature' in class_dict: + res = class_dict['vt_source_signature'](old_obj, trans_dict) + new_obj.db_vt_source_signature = res + elif hasattr(old_obj, 'db_vt_source_signature') and old_obj.db_vt_source_signature is not None: + new_obj.db_vt_source_signature = old_obj.db_vt_source_signature + if 'vt_dest_signature' in class_dict: + 
res = class_dict['vt_dest_signature'](old_obj, trans_dict) + new_obj.db_vt_dest_signature = res + elif hasattr(old_obj, 'db_vt_dest_signature') and old_obj.db_vt_dest_signature is not None: + new_obj.db_vt_dest_signature = old_obj.db_vt_dest_signature + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_vt_source(self): + return self._db_vt_source + def __set_db_vt_source(self, vt_source): + self._db_vt_source = vt_source + self.is_dirty = True + db_vt_source = property(__get_db_vt_source, __set_db_vt_source) + def db_add_vt_source(self, vt_source): + self._db_vt_source = vt_source + def db_change_vt_source(self, vt_source): + self._db_vt_source = vt_source + def db_delete_vt_source(self, vt_source): + self._db_vt_source = None + + def __get_db_vt_dest(self): + return self._db_vt_dest + def __set_db_vt_dest(self, vt_dest): + self._db_vt_dest = vt_dest + self.is_dirty = True + db_vt_dest = property(__get_db_vt_dest, __set_db_vt_dest) + def db_add_vt_dest(self, vt_dest): + self._db_vt_dest = vt_dest + def db_change_vt_dest(self, vt_dest): + self._db_vt_dest = vt_dest + def db_delete_vt_dest(self, vt_dest): + self._db_vt_dest = None + + def __get_db_vt_source_port(self): + return self._db_vt_source_port + def __set_db_vt_source_port(self, vt_source_port): + self._db_vt_source_port = vt_source_port + self.is_dirty = True + db_vt_source_port = 
property(__get_db_vt_source_port, __set_db_vt_source_port) + def db_add_vt_source_port(self, vt_source_port): + self._db_vt_source_port = vt_source_port + def db_change_vt_source_port(self, vt_source_port): + self._db_vt_source_port = vt_source_port + def db_delete_vt_source_port(self, vt_source_port): + self._db_vt_source_port = None + + def __get_db_vt_dest_port(self): + return self._db_vt_dest_port + def __set_db_vt_dest_port(self, vt_dest_port): + self._db_vt_dest_port = vt_dest_port + self.is_dirty = True + db_vt_dest_port = property(__get_db_vt_dest_port, __set_db_vt_dest_port) + def db_add_vt_dest_port(self, vt_dest_port): + self._db_vt_dest_port = vt_dest_port + def db_change_vt_dest_port(self, vt_dest_port): + self._db_vt_dest_port = vt_dest_port + def db_delete_vt_dest_port(self, vt_dest_port): + self._db_vt_dest_port = None + + def __get_db_vt_source_signature(self): + return self._db_vt_source_signature + def __set_db_vt_source_signature(self, vt_source_signature): + self._db_vt_source_signature = vt_source_signature + self.is_dirty = True + db_vt_source_signature = property(__get_db_vt_source_signature, __set_db_vt_source_signature) + def db_add_vt_source_signature(self, vt_source_signature): + self._db_vt_source_signature = vt_source_signature + def db_change_vt_source_signature(self, vt_source_signature): + self._db_vt_source_signature = vt_source_signature + def db_delete_vt_source_signature(self, vt_source_signature): + self._db_vt_source_signature = None + + def __get_db_vt_dest_signature(self): + return self._db_vt_dest_signature + def __set_db_vt_dest_signature(self, vt_dest_signature): + self._db_vt_dest_signature = vt_dest_signature + self.is_dirty = True + db_vt_dest_signature = property(__get_db_vt_dest_signature, __set_db_vt_dest_signature) + def db_add_vt_dest_signature(self, vt_dest_signature): + self._db_vt_dest_signature = vt_dest_signature + def db_change_vt_dest_signature(self, vt_dest_signature): + self._db_vt_dest_signature = 
vt_dest_signature + def db_delete_vt_dest_signature(self, vt_dest_signature): + self._db_vt_dest_signature = None + + def getPrimaryKey(self): + return self._db_id + +class DBMashupComponent(object): + + vtType = 'mashup_component' + + def __init__(self, id=None, vtid=None, vttype=None, vtparent_type=None, vtparent_id=None, vtpos=None, vtmid=None, pos=None, type=None, val=None, minVal=None, maxVal=None, stepSize=None, strvaluelist=None, widget=None, seq=None, parent=None): + self._db_id = id + self._db_vtid = vtid + self._db_vttype = vttype + self._db_vtparent_type = vtparent_type + self._db_vtparent_id = vtparent_id + self._db_vtpos = vtpos + self._db_vtmid = vtmid + self._db_pos = pos + self._db_type = type + self._db_val = val + self._db_minVal = minVal + self._db_maxVal = maxVal + self._db_stepSize = stepSize + self._db_strvaluelist = strvaluelist + self._db_widget = widget + self._db_seq = seq + self._db_parent = parent + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBMashupComponent.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBMashupComponent(id=self._db_id, + vtid=self._db_vtid, + vttype=self._db_vttype, + vtparent_type=self._db_vtparent_type, + vtparent_id=self._db_vtparent_id, + vtpos=self._db_vtpos, + vtmid=self._db_vtmid, + pos=self._db_pos, + type=self._db_type, + val=self._db_val, + minVal=self._db_minVal, + maxVal=self._db_maxVal, + stepSize=self._db_stepSize, + strvaluelist=self._db_strvaluelist, + widget=self._db_widget, + seq=self._db_seq, + parent=self._db_parent) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def 
update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBMashupComponent() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'vtid' in class_dict: + res = class_dict['vtid'](old_obj, trans_dict) + new_obj.db_vtid = res + elif hasattr(old_obj, 'db_vtid') and old_obj.db_vtid is not None: + new_obj.db_vtid = old_obj.db_vtid + if 'vttype' in class_dict: + res = class_dict['vttype'](old_obj, trans_dict) + new_obj.db_vttype = res + elif hasattr(old_obj, 'db_vttype') and old_obj.db_vttype is not None: + new_obj.db_vttype = old_obj.db_vttype + if 'vtparent_type' in class_dict: + res = class_dict['vtparent_type'](old_obj, trans_dict) + new_obj.db_vtparent_type = res + elif hasattr(old_obj, 'db_vtparent_type') and old_obj.db_vtparent_type is not None: + new_obj.db_vtparent_type = old_obj.db_vtparent_type + if 'vtparent_id' in class_dict: + res = class_dict['vtparent_id'](old_obj, trans_dict) + new_obj.db_vtparent_id = res + elif hasattr(old_obj, 'db_vtparent_id') and old_obj.db_vtparent_id is not None: + new_obj.db_vtparent_id = old_obj.db_vtparent_id + if 'vtpos' in class_dict: + res = class_dict['vtpos'](old_obj, trans_dict) + new_obj.db_vtpos = res + elif hasattr(old_obj, 'db_vtpos') and old_obj.db_vtpos is not None: + new_obj.db_vtpos = old_obj.db_vtpos + if 'vtmid' in class_dict: + res = class_dict['vtmid'](old_obj, trans_dict) + new_obj.db_vtmid = res + elif hasattr(old_obj, 'db_vtmid') and old_obj.db_vtmid is not None: + new_obj.db_vtmid = old_obj.db_vtmid + if 'pos' in class_dict: + res = class_dict['pos'](old_obj, trans_dict) + new_obj.db_pos = res + elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None: + new_obj.db_pos = old_obj.db_pos + if 'type' in class_dict: + res 
= class_dict['type'](old_obj, trans_dict) + new_obj.db_type = res + elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None: + new_obj.db_type = old_obj.db_type + if 'val' in class_dict: + res = class_dict['val'](old_obj, trans_dict) + new_obj.db_val = res + elif hasattr(old_obj, 'db_val') and old_obj.db_val is not None: + new_obj.db_val = old_obj.db_val + if 'minVal' in class_dict: + res = class_dict['minVal'](old_obj, trans_dict) + new_obj.db_minVal = res + elif hasattr(old_obj, 'db_minVal') and old_obj.db_minVal is not None: + new_obj.db_minVal = old_obj.db_minVal + if 'maxVal' in class_dict: + res = class_dict['maxVal'](old_obj, trans_dict) + new_obj.db_maxVal = res + elif hasattr(old_obj, 'db_maxVal') and old_obj.db_maxVal is not None: + new_obj.db_maxVal = old_obj.db_maxVal + if 'stepSize' in class_dict: + res = class_dict['stepSize'](old_obj, trans_dict) + new_obj.db_stepSize = res + elif hasattr(old_obj, 'db_stepSize') and old_obj.db_stepSize is not None: + new_obj.db_stepSize = old_obj.db_stepSize + if 'strvaluelist' in class_dict: + res = class_dict['strvaluelist'](old_obj, trans_dict) + new_obj.db_strvaluelist = res + elif hasattr(old_obj, 'db_strvaluelist') and old_obj.db_strvaluelist is not None: + new_obj.db_strvaluelist = old_obj.db_strvaluelist + if 'widget' in class_dict: + res = class_dict['widget'](old_obj, trans_dict) + new_obj.db_widget = res + elif hasattr(old_obj, 'db_widget') and old_obj.db_widget is not None: + new_obj.db_widget = old_obj.db_widget + if 'seq' in class_dict: + res = class_dict['seq'](old_obj, trans_dict) + new_obj.db_seq = res + elif hasattr(old_obj, 'db_seq') and old_obj.db_seq is not None: + new_obj.db_seq = old_obj.db_seq + if 'parent' in class_dict: + res = class_dict['parent'](old_obj, trans_dict) + new_obj.db_parent = res + elif hasattr(old_obj, 'db_parent') and old_obj.db_parent is not None: + new_obj.db_parent = old_obj.db_parent + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + 
return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_vtid(self): + return self._db_vtid + def __set_db_vtid(self, vtid): + self._db_vtid = vtid + self.is_dirty = True + db_vtid = property(__get_db_vtid, __set_db_vtid) + def db_add_vtid(self, vtid): + self._db_vtid = vtid + def db_change_vtid(self, vtid): + self._db_vtid = vtid + def db_delete_vtid(self, vtid): + self._db_vtid = None + + def __get_db_vttype(self): + return self._db_vttype + def __set_db_vttype(self, vttype): + self._db_vttype = vttype + self.is_dirty = True + db_vttype = property(__get_db_vttype, __set_db_vttype) + def db_add_vttype(self, vttype): + self._db_vttype = vttype + def db_change_vttype(self, vttype): + self._db_vttype = vttype + def db_delete_vttype(self, vttype): + self._db_vttype = None + + def __get_db_vtparent_type(self): + return self._db_vtparent_type + def __set_db_vtparent_type(self, vtparent_type): + self._db_vtparent_type = vtparent_type + self.is_dirty = True + db_vtparent_type = property(__get_db_vtparent_type, __set_db_vtparent_type) + def db_add_vtparent_type(self, vtparent_type): + self._db_vtparent_type = vtparent_type + def db_change_vtparent_type(self, vtparent_type): + self._db_vtparent_type = vtparent_type + def db_delete_vtparent_type(self, vtparent_type): + self._db_vtparent_type = None + + def __get_db_vtparent_id(self): + return self._db_vtparent_id + def __set_db_vtparent_id(self, vtparent_id): + 
self._db_vtparent_id = vtparent_id + self.is_dirty = True + db_vtparent_id = property(__get_db_vtparent_id, __set_db_vtparent_id) + def db_add_vtparent_id(self, vtparent_id): + self._db_vtparent_id = vtparent_id + def db_change_vtparent_id(self, vtparent_id): + self._db_vtparent_id = vtparent_id + def db_delete_vtparent_id(self, vtparent_id): + self._db_vtparent_id = None + + def __get_db_vtpos(self): + return self._db_vtpos + def __set_db_vtpos(self, vtpos): + self._db_vtpos = vtpos + self.is_dirty = True + db_vtpos = property(__get_db_vtpos, __set_db_vtpos) + def db_add_vtpos(self, vtpos): + self._db_vtpos = vtpos + def db_change_vtpos(self, vtpos): + self._db_vtpos = vtpos + def db_delete_vtpos(self, vtpos): + self._db_vtpos = None + + def __get_db_vtmid(self): + return self._db_vtmid + def __set_db_vtmid(self, vtmid): + self._db_vtmid = vtmid + self.is_dirty = True + db_vtmid = property(__get_db_vtmid, __set_db_vtmid) + def db_add_vtmid(self, vtmid): + self._db_vtmid = vtmid + def db_change_vtmid(self, vtmid): + self._db_vtmid = vtmid + def db_delete_vtmid(self, vtmid): + self._db_vtmid = None + + def __get_db_pos(self): + return self._db_pos + def __set_db_pos(self, pos): + self._db_pos = pos + self.is_dirty = True + db_pos = property(__get_db_pos, __set_db_pos) + def db_add_pos(self, pos): + self._db_pos = pos + def db_change_pos(self, pos): + self._db_pos = pos + def db_delete_pos(self, pos): + self._db_pos = None + + def __get_db_type(self): + return self._db_type + def __set_db_type(self, type): + self._db_type = type + self.is_dirty = True + db_type = property(__get_db_type, __set_db_type) + def db_add_type(self, type): + self._db_type = type + def db_change_type(self, type): + self._db_type = type + def db_delete_type(self, type): + self._db_type = None + + def __get_db_val(self): + return self._db_val + def __set_db_val(self, val): + self._db_val = val + self.is_dirty = True + db_val = property(__get_db_val, __set_db_val) + def db_add_val(self, val): + 
self._db_val = val + def db_change_val(self, val): + self._db_val = val + def db_delete_val(self, val): + self._db_val = None + + def __get_db_minVal(self): + return self._db_minVal + def __set_db_minVal(self, minVal): + self._db_minVal = minVal + self.is_dirty = True + db_minVal = property(__get_db_minVal, __set_db_minVal) + def db_add_minVal(self, minVal): + self._db_minVal = minVal + def db_change_minVal(self, minVal): + self._db_minVal = minVal + def db_delete_minVal(self, minVal): + self._db_minVal = None + + def __get_db_maxVal(self): + return self._db_maxVal + def __set_db_maxVal(self, maxVal): + self._db_maxVal = maxVal + self.is_dirty = True + db_maxVal = property(__get_db_maxVal, __set_db_maxVal) + def db_add_maxVal(self, maxVal): + self._db_maxVal = maxVal + def db_change_maxVal(self, maxVal): + self._db_maxVal = maxVal + def db_delete_maxVal(self, maxVal): + self._db_maxVal = None + + def __get_db_stepSize(self): + return self._db_stepSize + def __set_db_stepSize(self, stepSize): + self._db_stepSize = stepSize + self.is_dirty = True + db_stepSize = property(__get_db_stepSize, __set_db_stepSize) + def db_add_stepSize(self, stepSize): + self._db_stepSize = stepSize + def db_change_stepSize(self, stepSize): + self._db_stepSize = stepSize + def db_delete_stepSize(self, stepSize): + self._db_stepSize = None + + def __get_db_strvaluelist(self): + return self._db_strvaluelist + def __set_db_strvaluelist(self, strvaluelist): + self._db_strvaluelist = strvaluelist + self.is_dirty = True + db_strvaluelist = property(__get_db_strvaluelist, __set_db_strvaluelist) + def db_add_strvaluelist(self, strvaluelist): + self._db_strvaluelist = strvaluelist + def db_change_strvaluelist(self, strvaluelist): + self._db_strvaluelist = strvaluelist + def db_delete_strvaluelist(self, strvaluelist): + self._db_strvaluelist = None + + def __get_db_widget(self): + return self._db_widget + def __set_db_widget(self, widget): + self._db_widget = widget + self.is_dirty = True + 
db_widget = property(__get_db_widget, __set_db_widget) + def db_add_widget(self, widget): + self._db_widget = widget + def db_change_widget(self, widget): + self._db_widget = widget + def db_delete_widget(self, widget): + self._db_widget = None + + def __get_db_seq(self): + return self._db_seq + def __set_db_seq(self, seq): + self._db_seq = seq + self.is_dirty = True + db_seq = property(__get_db_seq, __set_db_seq) + def db_add_seq(self, seq): + self._db_seq = seq + def db_change_seq(self, seq): + self._db_seq = seq + def db_delete_seq(self, seq): + self._db_seq = None + + def __get_db_parent(self): + return self._db_parent + def __set_db_parent(self, parent): + self._db_parent = parent + self.is_dirty = True + db_parent = property(__get_db_parent, __set_db_parent) + def db_add_parent(self, parent): + self._db_parent = parent + def db_change_parent(self, parent): + self._db_parent = parent + def db_delete_parent(self, parent): + self._db_parent = None + + def getPrimaryKey(self): + return self._db_id + +class DBProvEntity(object): + + vtType = 'prov_entity' + + def __init__(self, id=None, prov_type=None, prov_label=None, prov_value=None, vt_id=None, vt_type=None, vt_desc=None, vt_package=None, vt_version=None, vt_cache=None, vt_location_x=None, vt_location_y=None, is_part_of=None): + self._db_id = id + self._db_prov_type = prov_type + self._db_prov_label = prov_label + self._db_prov_value = prov_value + self._db_vt_id = vt_id + self._db_vt_type = vt_type + self._db_vt_desc = vt_desc + self._db_vt_package = vt_package + self._db_vt_version = vt_version + self._db_vt_cache = vt_cache + self._db_vt_location_x = vt_location_x + self._db_vt_location_y = vt_location_y + self.db_deleted_is_part_of = [] + self._db_is_part_of = is_part_of + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBProvEntity.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBProvEntity(id=self._db_id, + prov_type=self._db_prov_type, 
+ prov_label=self._db_prov_label, + prov_value=self._db_prov_value, + vt_id=self._db_vt_id, + vt_type=self._db_vt_type, + vt_desc=self._db_vt_desc, + vt_package=self._db_vt_package, + vt_version=self._db_vt_version, + vt_cache=self._db_vt_cache, + vt_location_x=self._db_vt_location_x, + vt_location_y=self._db_vt_location_y) + if self._db_is_part_of is not None: + cp._db_is_part_of = self._db_is_part_of.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBProvEntity() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'prov_type' in class_dict: + res = class_dict['prov_type'](old_obj, trans_dict) + new_obj.db_prov_type = res + elif hasattr(old_obj, 'db_prov_type') and old_obj.db_prov_type is not None: + new_obj.db_prov_type = old_obj.db_prov_type + if 'prov_label' in class_dict: + res = class_dict['prov_label'](old_obj, trans_dict) + new_obj.db_prov_label = res + elif hasattr(old_obj, 'db_prov_label') and old_obj.db_prov_label is not None: + new_obj.db_prov_label = old_obj.db_prov_label + if 'prov_value' in class_dict: + res = class_dict['prov_value'](old_obj, trans_dict) + new_obj.db_prov_value = res + elif hasattr(old_obj, 'db_prov_value') and old_obj.db_prov_value is not None: + new_obj.db_prov_value = old_obj.db_prov_value + if 'vt_id' in 
class_dict: + res = class_dict['vt_id'](old_obj, trans_dict) + new_obj.db_vt_id = res + elif hasattr(old_obj, 'db_vt_id') and old_obj.db_vt_id is not None: + new_obj.db_vt_id = old_obj.db_vt_id + if 'vt_type' in class_dict: + res = class_dict['vt_type'](old_obj, trans_dict) + new_obj.db_vt_type = res + elif hasattr(old_obj, 'db_vt_type') and old_obj.db_vt_type is not None: + new_obj.db_vt_type = old_obj.db_vt_type + if 'vt_desc' in class_dict: + res = class_dict['vt_desc'](old_obj, trans_dict) + new_obj.db_vt_desc = res + elif hasattr(old_obj, 'db_vt_desc') and old_obj.db_vt_desc is not None: + new_obj.db_vt_desc = old_obj.db_vt_desc + if 'vt_package' in class_dict: + res = class_dict['vt_package'](old_obj, trans_dict) + new_obj.db_vt_package = res + elif hasattr(old_obj, 'db_vt_package') and old_obj.db_vt_package is not None: + new_obj.db_vt_package = old_obj.db_vt_package + if 'vt_version' in class_dict: + res = class_dict['vt_version'](old_obj, trans_dict) + new_obj.db_vt_version = res + elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None: + new_obj.db_vt_version = old_obj.db_vt_version + if 'vt_cache' in class_dict: + res = class_dict['vt_cache'](old_obj, trans_dict) + new_obj.db_vt_cache = res + elif hasattr(old_obj, 'db_vt_cache') and old_obj.db_vt_cache is not None: + new_obj.db_vt_cache = old_obj.db_vt_cache + if 'vt_location_x' in class_dict: + res = class_dict['vt_location_x'](old_obj, trans_dict) + new_obj.db_vt_location_x = res + elif hasattr(old_obj, 'db_vt_location_x') and old_obj.db_vt_location_x is not None: + new_obj.db_vt_location_x = old_obj.db_vt_location_x + if 'vt_location_y' in class_dict: + res = class_dict['vt_location_y'](old_obj, trans_dict) + new_obj.db_vt_location_y = res + elif hasattr(old_obj, 'db_vt_location_y') and old_obj.db_vt_location_y is not None: + new_obj.db_vt_location_y = old_obj.db_vt_location_y + if 'is_part_of' in class_dict: + res = class_dict['is_part_of'](old_obj, trans_dict) + 
new_obj.db_is_part_of = res + elif hasattr(old_obj, 'db_is_part_of') and old_obj.db_is_part_of is not None: + obj = old_obj.db_is_part_of + new_obj.db_add_is_part_of(DBIsPartOf.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_is_part_of') and hasattr(new_obj, 'db_deleted_is_part_of'): + for obj in old_obj.db_deleted_is_part_of: + n_obj = DBIsPartOf.update_version(obj, trans_dict) + new_obj.db_deleted_is_part_of.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_is_part_of is not None: + children.extend(self._db_is_part_of.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_is_part_of = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_is_part_of) + if remove: + self.db_deleted_is_part_of = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_is_part_of is not None and self._db_is_part_of.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_prov_type(self): + return self._db_prov_type + def __set_db_prov_type(self, prov_type): + self._db_prov_type = prov_type + self.is_dirty = True + db_prov_type = property(__get_db_prov_type, __set_db_prov_type) + def db_add_prov_type(self, prov_type): + self._db_prov_type = prov_type + def db_change_prov_type(self, prov_type): + self._db_prov_type = prov_type + def db_delete_prov_type(self, prov_type): + self._db_prov_type = None + + def 
__get_db_prov_label(self): + return self._db_prov_label + def __set_db_prov_label(self, prov_label): + self._db_prov_label = prov_label + self.is_dirty = True + db_prov_label = property(__get_db_prov_label, __set_db_prov_label) + def db_add_prov_label(self, prov_label): + self._db_prov_label = prov_label + def db_change_prov_label(self, prov_label): + self._db_prov_label = prov_label + def db_delete_prov_label(self, prov_label): + self._db_prov_label = None + + def __get_db_prov_value(self): + return self._db_prov_value + def __set_db_prov_value(self, prov_value): + self._db_prov_value = prov_value + self.is_dirty = True + db_prov_value = property(__get_db_prov_value, __set_db_prov_value) + def db_add_prov_value(self, prov_value): + self._db_prov_value = prov_value + def db_change_prov_value(self, prov_value): + self._db_prov_value = prov_value + def db_delete_prov_value(self, prov_value): + self._db_prov_value = None + + def __get_db_vt_id(self): + return self._db_vt_id + def __set_db_vt_id(self, vt_id): + self._db_vt_id = vt_id + self.is_dirty = True + db_vt_id = property(__get_db_vt_id, __set_db_vt_id) + def db_add_vt_id(self, vt_id): + self._db_vt_id = vt_id + def db_change_vt_id(self, vt_id): + self._db_vt_id = vt_id + def db_delete_vt_id(self, vt_id): + self._db_vt_id = None + + def __get_db_vt_type(self): + return self._db_vt_type + def __set_db_vt_type(self, vt_type): + self._db_vt_type = vt_type + self.is_dirty = True + db_vt_type = property(__get_db_vt_type, __set_db_vt_type) + def db_add_vt_type(self, vt_type): + self._db_vt_type = vt_type + def db_change_vt_type(self, vt_type): + self._db_vt_type = vt_type + def db_delete_vt_type(self, vt_type): + self._db_vt_type = None + + def __get_db_vt_desc(self): + return self._db_vt_desc + def __set_db_vt_desc(self, vt_desc): + self._db_vt_desc = vt_desc + self.is_dirty = True + db_vt_desc = property(__get_db_vt_desc, __set_db_vt_desc) + def db_add_vt_desc(self, vt_desc): + self._db_vt_desc = vt_desc + def 
db_change_vt_desc(self, vt_desc): + self._db_vt_desc = vt_desc + def db_delete_vt_desc(self, vt_desc): + self._db_vt_desc = None + + def __get_db_vt_package(self): + return self._db_vt_package + def __set_db_vt_package(self, vt_package): + self._db_vt_package = vt_package + self.is_dirty = True + db_vt_package = property(__get_db_vt_package, __set_db_vt_package) + def db_add_vt_package(self, vt_package): + self._db_vt_package = vt_package + def db_change_vt_package(self, vt_package): + self._db_vt_package = vt_package + def db_delete_vt_package(self, vt_package): + self._db_vt_package = None + + def __get_db_vt_version(self): + return self._db_vt_version + def __set_db_vt_version(self, vt_version): + self._db_vt_version = vt_version + self.is_dirty = True + db_vt_version = property(__get_db_vt_version, __set_db_vt_version) + def db_add_vt_version(self, vt_version): + self._db_vt_version = vt_version + def db_change_vt_version(self, vt_version): + self._db_vt_version = vt_version + def db_delete_vt_version(self, vt_version): + self._db_vt_version = None + + def __get_db_vt_cache(self): + return self._db_vt_cache + def __set_db_vt_cache(self, vt_cache): + self._db_vt_cache = vt_cache + self.is_dirty = True + db_vt_cache = property(__get_db_vt_cache, __set_db_vt_cache) + def db_add_vt_cache(self, vt_cache): + self._db_vt_cache = vt_cache + def db_change_vt_cache(self, vt_cache): + self._db_vt_cache = vt_cache + def db_delete_vt_cache(self, vt_cache): + self._db_vt_cache = None + + def __get_db_vt_location_x(self): + return self._db_vt_location_x + def __set_db_vt_location_x(self, vt_location_x): + self._db_vt_location_x = vt_location_x + self.is_dirty = True + db_vt_location_x = property(__get_db_vt_location_x, __set_db_vt_location_x) + def db_add_vt_location_x(self, vt_location_x): + self._db_vt_location_x = vt_location_x + def db_change_vt_location_x(self, vt_location_x): + self._db_vt_location_x = vt_location_x + def db_delete_vt_location_x(self, vt_location_x): + 
self._db_vt_location_x = None + + def __get_db_vt_location_y(self): + return self._db_vt_location_y + def __set_db_vt_location_y(self, vt_location_y): + self._db_vt_location_y = vt_location_y + self.is_dirty = True + db_vt_location_y = property(__get_db_vt_location_y, __set_db_vt_location_y) + def db_add_vt_location_y(self, vt_location_y): + self._db_vt_location_y = vt_location_y + def db_change_vt_location_y(self, vt_location_y): + self._db_vt_location_y = vt_location_y + def db_delete_vt_location_y(self, vt_location_y): + self._db_vt_location_y = None + + def __get_db_is_part_of(self): + return self._db_is_part_of + def __set_db_is_part_of(self, is_part_of): + self._db_is_part_of = is_part_of + self.is_dirty = True + db_is_part_of = property(__get_db_is_part_of, __set_db_is_part_of) + def db_add_is_part_of(self, is_part_of): + self._db_is_part_of = is_part_of + def db_change_is_part_of(self, is_part_of): + self._db_is_part_of = is_part_of + def db_delete_is_part_of(self, is_part_of): + if not self.is_new: + self.db_deleted_is_part_of.append(self._db_is_part_of) + self._db_is_part_of = None + + def getPrimaryKey(self): + return self._db_id + +class DBAnnotation(object): + + vtType = 'annotation' + + def __init__(self, id=None, key=None, value=None): + self._db_id = id + self._db_key = key + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBAnnotation.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBAnnotation(id=self._db_id, + key=self._db_key, + value=self._db_value) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def 
update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBAnnotation() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'key' in class_dict: + res = class_dict['key'](old_obj, trans_dict) + new_obj.db_key = res + elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None: + new_obj.db_key = old_obj.db_key + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_key(self): + return self._db_key + def __set_db_key(self, key): + self._db_key = key + self.is_dirty = True + db_key = property(__get_db_key, __set_db_key) + def db_add_key(self, key): + self._db_key = key + def db_change_key(self, key): + self._db_key = key + def db_delete_key(self, key): + self._db_key = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, 
__set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + def getPrimaryKey(self): + return self._db_id + +class DBChange(object): + + vtType = 'change' + + def __init__(self, data=None, id=None, what=None, oldObjId=None, newObjId=None, parentObjId=None, parentObjType=None): + self.db_deleted_data = [] + self._db_data = data + self._db_id = id + self._db_what = what + self._db_oldObjId = oldObjId + self._db_newObjId = newObjId + self._db_parentObjId = parentObjId + self._db_parentObjType = parentObjType + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBChange.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBChange(id=self._db_id, + what=self._db_what, + oldObjId=self._db_oldObjId, + newObjId=self._db_newObjId, + parentObjId=self._db_parentObjId, + parentObjType=self._db_parentObjType) + if self._db_data is not None: + cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_oldObjId') and (self._db_what, self._db_oldObjId) in id_remap: + cp._db_oldObjId = id_remap[(self._db_what, self._db_oldObjId)] + if hasattr(self, 'db_newObjId') and (self._db_what, self._db_newObjId) in id_remap: + cp._db_newObjId = id_remap[(self._db_what, self._db_newObjId)] + if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap: + cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def 
update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBChange() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'data' in class_dict: + res = class_dict['data'](old_obj, trans_dict) + new_obj.db_data = res + elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None: + obj = old_obj.db_data + if obj.vtType == 'module': + new_obj.db_add_data(DBModule.update_version(obj, trans_dict)) + elif obj.vtType == 'location': + new_obj.db_add_data(DBLocation.update_version(obj, trans_dict)) + elif obj.vtType == 'annotation': + new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict)) + elif obj.vtType == 'controlParameter': + new_obj.db_add_data(DBControlParameter.update_version(obj, trans_dict)) + elif obj.vtType == 'function': + new_obj.db_add_data(DBFunction.update_version(obj, trans_dict)) + elif obj.vtType == 'connection': + new_obj.db_add_data(DBConnection.update_version(obj, trans_dict)) + elif obj.vtType == 'port': + new_obj.db_add_data(DBPort.update_version(obj, trans_dict)) + elif obj.vtType == 'parameter': + new_obj.db_add_data(DBParameter.update_version(obj, trans_dict)) + elif obj.vtType == 'portSpec': + new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict)) + elif obj.vtType == 'abstraction': + new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict)) + elif obj.vtType == 'group': + new_obj.db_add_data(DBGroup.update_version(obj, trans_dict)) + elif obj.vtType == 'other': + new_obj.db_add_data(DBOther.update_version(obj, trans_dict)) + elif obj.vtType == 'plugin_data': + new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'): + for obj in old_obj.db_deleted_data: + if obj.vtType == 'module': + n_obj = DBModule.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'location': + n_obj = 
DBLocation.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'annotation': + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'controlParameter': + n_obj = DBControlParameter.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'function': + n_obj = DBFunction.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'connection': + n_obj = DBConnection.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'port': + n_obj = DBPort.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'parameter': + n_obj = DBParameter.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'portSpec': + n_obj = DBPortSpec.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'abstraction': + n_obj = DBAbstraction.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'group': + n_obj = DBGroup.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'other': + n_obj = DBOther.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + elif obj.vtType == 'plugin_data': + n_obj = DBPluginData.update_version(obj, trans_dict) + new_obj.db_deleted_data.append(n_obj) + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'what' in class_dict: + res = class_dict['what'](old_obj, trans_dict) + new_obj.db_what = res + elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None: + new_obj.db_what = old_obj.db_what + if 'oldObjId' in class_dict: + res = class_dict['oldObjId'](old_obj, trans_dict) + new_obj.db_oldObjId = res + elif 
hasattr(old_obj, 'db_oldObjId') and old_obj.db_oldObjId is not None: + new_obj.db_oldObjId = old_obj.db_oldObjId + if 'newObjId' in class_dict: + res = class_dict['newObjId'](old_obj, trans_dict) + new_obj.db_newObjId = res + elif hasattr(old_obj, 'db_newObjId') and old_obj.db_newObjId is not None: + new_obj.db_newObjId = old_obj.db_newObjId + if 'parentObjId' in class_dict: + res = class_dict['parentObjId'](old_obj, trans_dict) + new_obj.db_parentObjId = res + elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None: + new_obj.db_parentObjId = old_obj.db_parentObjId + if 'parentObjType' in class_dict: + res = class_dict['parentObjType'](old_obj, trans_dict) + new_obj.db_parentObjType = res + elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None: + new_obj.db_parentObjType = old_obj.db_parentObjType + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_data is not None: + children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_data = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_data) + if remove: + self.db_deleted_data = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_data is not None and self._db_data.has_changes(): + return True + return False + def __get_db_data(self): + return self._db_data + def __set_db_data(self, data): + self._db_data = data + self.is_dirty = True + db_data = property(__get_db_data, __set_db_data) + def db_add_data(self, data): + self._db_data = data + def db_change_data(self, data): + self._db_data = data + def db_delete_data(self, data): + if not self.is_new: + self.db_deleted_data.append(self._db_data) + self._db_data = 
None + + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_what(self): + return self._db_what + def __set_db_what(self, what): + self._db_what = what + self.is_dirty = True + db_what = property(__get_db_what, __set_db_what) + def db_add_what(self, what): + self._db_what = what + def db_change_what(self, what): + self._db_what = what + def db_delete_what(self, what): + self._db_what = None + + def __get_db_oldObjId(self): + return self._db_oldObjId + def __set_db_oldObjId(self, oldObjId): + self._db_oldObjId = oldObjId + self.is_dirty = True + db_oldObjId = property(__get_db_oldObjId, __set_db_oldObjId) + def db_add_oldObjId(self, oldObjId): + self._db_oldObjId = oldObjId + def db_change_oldObjId(self, oldObjId): + self._db_oldObjId = oldObjId + def db_delete_oldObjId(self, oldObjId): + self._db_oldObjId = None + + def __get_db_newObjId(self): + return self._db_newObjId + def __set_db_newObjId(self, newObjId): + self._db_newObjId = newObjId + self.is_dirty = True + db_newObjId = property(__get_db_newObjId, __set_db_newObjId) + def db_add_newObjId(self, newObjId): + self._db_newObjId = newObjId + def db_change_newObjId(self, newObjId): + self._db_newObjId = newObjId + def db_delete_newObjId(self, newObjId): + self._db_newObjId = None + + def __get_db_parentObjId(self): + return self._db_parentObjId + def __set_db_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + self.is_dirty = True + db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId) + def db_add_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + def db_change_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + def db_delete_parentObjId(self, parentObjId): + 
self._db_parentObjId = None + + def __get_db_parentObjType(self): + return self._db_parentObjType + def __set_db_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + self.is_dirty = True + db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType) + def db_add_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + def db_change_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + def db_delete_parentObjType(self, parentObjType): + self._db_parentObjType = None + + def getPrimaryKey(self): + return self._db_id + +class DBOpmWasDerivedFrom(object): + + vtType = 'opm_was_derived_from' + + def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None): + self.db_deleted_effect = [] + self._db_effect = effect + self.db_deleted_role = [] + self._db_role = role + self.db_deleted_cause = [] + self._db_cause = cause + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.db_deleted_opm_times = [] + if opm_times is None: + self._db_opm_times = [] + else: + self._db_opm_times = opm_times + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmWasDerivedFrom.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmWasDerivedFrom() + if self._db_effect is not None: + cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap) + if self._db_role is not None: + cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap) + if self._db_cause is not None: + cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + if self._db_opm_times is None: + cp._db_opm_times = [] + else: + cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times] + 
+ # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmWasDerivedFrom() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'effect' in class_dict: + res = class_dict['effect'](old_obj, trans_dict) + new_obj.db_effect = res + elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None: + obj = old_obj.db_effect + new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'): + for obj in old_obj.db_deleted_effect: + n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict) + new_obj.db_deleted_effect.append(n_obj) + if 'role' in class_dict: + res = class_dict['role'](old_obj, trans_dict) + new_obj.db_role = res + elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None: + obj = old_obj.db_role + new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'): + for obj in old_obj.db_deleted_role: + n_obj = DBOpmRole.update_version(obj, trans_dict) + new_obj.db_deleted_role.append(n_obj) + if 'cause' in class_dict: + res = class_dict['cause'](old_obj, trans_dict) + new_obj.db_cause = res + elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None: + obj = old_obj.db_cause + new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'): + for obj in old_obj.db_deleted_cause: + 
n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict) + new_obj.db_deleted_cause.append(n_obj) + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'opm_times' in class_dict: + res = class_dict['opm_times'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_opm_time(obj) + elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None: + for obj in old_obj.db_opm_times: + new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'): + for obj in old_obj.db_deleted_opm_times: + n_obj = DBOpmTime.update_version(obj, trans_dict) + new_obj.db_deleted_opm_times.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_effect is not None: + children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_effect = None + if self._db_role is not None: + children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_role = None + if self._db_cause is not None: + children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_cause = None + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, 
for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + to_del = [] + for child in self.db_opm_times: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_opm_time(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_effect) + children.extend(self.db_deleted_role) + children.extend(self.db_deleted_cause) + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_opm_times) + if remove: + self.db_deleted_effect = [] + self.db_deleted_role = [] + self.db_deleted_cause = [] + self.db_deleted_accounts = [] + self.db_deleted_opm_times = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_effect is not None and self._db_effect.has_changes(): + return True + if self._db_role is not None and self._db_role.has_changes(): + return True + if self._db_cause is not None and self._db_cause.has_changes(): + return True + for child in self._db_accounts: + if child.has_changes(): + return True + for child in self._db_opm_times: + if child.has_changes(): + return True + return False + def __get_db_effect(self): + return self._db_effect + def __set_db_effect(self, effect): + self._db_effect = effect + self.is_dirty = True + db_effect = property(__get_db_effect, __set_db_effect) + def db_add_effect(self, effect): + self._db_effect = effect + def db_change_effect(self, effect): + self._db_effect = effect + def db_delete_effect(self, effect): + if not self.is_new: + self.db_deleted_effect.append(self._db_effect) + self._db_effect = None + + def __get_db_role(self): + return self._db_role + def __set_db_role(self, role): + self._db_role = role + self.is_dirty = True + db_role = property(__get_db_role, __set_db_role) + def db_add_role(self, role): + 
self._db_role = role + def db_change_role(self, role): + self._db_role = role + def db_delete_role(self, role): + if not self.is_new: + self.db_deleted_role.append(self._db_role) + self._db_role = None + + def __get_db_cause(self): + return self._db_cause + def __set_db_cause(self, cause): + self._db_cause = cause + self.is_dirty = True + db_cause = property(__get_db_cause, __set_db_cause) + def db_add_cause(self, cause): + self._db_cause = cause + def db_change_cause(self, cause): + self._db_cause = cause + def db_delete_cause(self, cause): + if not self.is_new: + self.db_deleted_cause.append(self._db_cause) + self._db_cause = None + + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def __get_db_opm_times(self): + return self._db_opm_times + def __set_db_opm_times(self, opm_times): + self._db_opm_times = opm_times + self.is_dirty = True + db_opm_times = property(__get_db_opm_times, __set_db_opm_times) + def db_get_opm_times(self): + return self._db_opm_times + def db_add_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_change_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_delete_opm_time(self, opm_time): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_opm_time(self, key): + return None + + + +class DBOpmArtifacts(object): + + vtType = 'opm_artifacts' + + 
def __init__(self, artifacts=None): + self.db_deleted_artifacts = [] + self.db_artifacts_id_index = {} + if artifacts is None: + self._db_artifacts = [] + else: + self._db_artifacts = artifacts + for v in self._db_artifacts: + self.db_artifacts_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmArtifacts.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmArtifacts() + if self._db_artifacts is None: + cp._db_artifacts = [] + else: + cp._db_artifacts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_artifacts] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_artifacts_id_index = dict((v.db_id, v) for v in cp._db_artifacts) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmArtifacts() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'artifacts' in class_dict: + res = class_dict['artifacts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_artifact(obj) + elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None: + for obj in old_obj.db_artifacts: + new_obj.db_add_artifact(DBOpmArtifact.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'): + for obj in old_obj.db_deleted_artifacts: + n_obj = DBOpmArtifact.update_version(obj, trans_dict) + new_obj.db_deleted_artifacts.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, 
parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_artifacts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_artifact(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_artifacts) + if remove: + self.db_deleted_artifacts = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_artifacts: + if child.has_changes(): + return True + return False + def __get_db_artifacts(self): + return self._db_artifacts + def __set_db_artifacts(self, artifacts): + self._db_artifacts = artifacts + self.is_dirty = True + db_artifacts = property(__get_db_artifacts, __set_db_artifacts) + def db_get_artifacts(self): + return self._db_artifacts + def db_add_artifact(self, artifact): + self.is_dirty = True + self._db_artifacts.append(artifact) + self.db_artifacts_id_index[artifact.db_id] = artifact + def db_change_artifact(self, artifact): + self.is_dirty = True + found = False + for i in xrange(len(self._db_artifacts)): + if self._db_artifacts[i].db_id == artifact.db_id: + self._db_artifacts[i] = artifact + found = True + break + if not found: + self._db_artifacts.append(artifact) + self.db_artifacts_id_index[artifact.db_id] = artifact + def db_delete_artifact(self, artifact): + self.is_dirty = True + for i in xrange(len(self._db_artifacts)): + if self._db_artifacts[i].db_id == artifact.db_id: + if not self._db_artifacts[i].is_new: + self.db_deleted_artifacts.append(self._db_artifacts[i]) + del self._db_artifacts[i] + break + del self.db_artifacts_id_index[artifact.db_id] + def db_get_artifact(self, key): + for i in xrange(len(self._db_artifacts)): + if self._db_artifacts[i].db_id == key: + return self._db_artifacts[i] + return None + def 
db_get_artifact_by_id(self, key): + return self.db_artifacts_id_index[key] + def db_has_artifact_with_id(self, key): + return key in self.db_artifacts_id_index + + + +class DBOpmWasControlledBy(object): + + vtType = 'opm_was_controlled_by' + + def __init__(self, effect=None, role=None, cause=None, accounts=None, starts=None, ends=None): + self.db_deleted_effect = [] + self._db_effect = effect + self.db_deleted_role = [] + self._db_role = role + self.db_deleted_cause = [] + self._db_cause = cause + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.db_deleted_starts = [] + if starts is None: + self._db_starts = [] + else: + self._db_starts = starts + self.db_deleted_ends = [] + if ends is None: + self._db_ends = [] + else: + self._db_ends = ends + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmWasControlledBy.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmWasControlledBy() + if self._db_effect is not None: + cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap) + if self._db_role is not None: + cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap) + if self._db_cause is not None: + cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + if self._db_starts is None: + cp._db_starts = [] + else: + cp._db_starts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_starts] + if self._db_ends is None: + cp._db_ends = [] + else: + cp._db_ends = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ends] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = 
new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmWasControlledBy() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'effect' in class_dict: + res = class_dict['effect'](old_obj, trans_dict) + new_obj.db_effect = res + elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None: + obj = old_obj.db_effect + new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'): + for obj in old_obj.db_deleted_effect: + n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict) + new_obj.db_deleted_effect.append(n_obj) + if 'role' in class_dict: + res = class_dict['role'](old_obj, trans_dict) + new_obj.db_role = res + elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None: + obj = old_obj.db_role + new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'): + for obj in old_obj.db_deleted_role: + n_obj = DBOpmRole.update_version(obj, trans_dict) + new_obj.db_deleted_role.append(n_obj) + if 'cause' in class_dict: + res = class_dict['cause'](old_obj, trans_dict) + new_obj.db_cause = res + elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None: + obj = old_obj.db_cause + new_obj.db_add_cause(DBOpmAgentId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'): + for obj in old_obj.db_deleted_cause: + n_obj = DBOpmAgentId.update_version(obj, trans_dict) + new_obj.db_deleted_cause.append(n_obj) + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + 
elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'starts' in class_dict: + res = class_dict['starts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_start(obj) + elif hasattr(old_obj, 'db_starts') and old_obj.db_starts is not None: + for obj in old_obj.db_starts: + new_obj.db_add_start(DBOpmTime.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_starts') and hasattr(new_obj, 'db_deleted_starts'): + for obj in old_obj.db_deleted_starts: + n_obj = DBOpmTime.update_version(obj, trans_dict) + new_obj.db_deleted_starts.append(n_obj) + if 'ends' in class_dict: + res = class_dict['ends'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_end(obj) + elif hasattr(old_obj, 'db_ends') and old_obj.db_ends is not None: + for obj in old_obj.db_ends: + new_obj.db_add_end(DBOpmTime.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_ends') and hasattr(new_obj, 'db_deleted_ends'): + for obj in old_obj.db_deleted_ends: + n_obj = DBOpmTime.update_version(obj, trans_dict) + new_obj.db_deleted_ends.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_effect is not None: + children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_effect = None + if self._db_role is not None: + children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_role = None + if self._db_cause is not None: + 
children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_cause = None + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + to_del = [] + for child in self.db_starts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_start(child) + to_del = [] + for child in self.db_ends: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_end(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_effect) + children.extend(self.db_deleted_role) + children.extend(self.db_deleted_cause) + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_starts) + children.extend(self.db_deleted_ends) + if remove: + self.db_deleted_effect = [] + self.db_deleted_role = [] + self.db_deleted_cause = [] + self.db_deleted_accounts = [] + self.db_deleted_starts = [] + self.db_deleted_ends = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_effect is not None and self._db_effect.has_changes(): + return True + if self._db_role is not None and self._db_role.has_changes(): + return True + if self._db_cause is not None and self._db_cause.has_changes(): + return True + for child in self._db_accounts: + if child.has_changes(): + return True + for child in self._db_starts: + if child.has_changes(): + return True + for child in self._db_ends: + if child.has_changes(): + return True + return False + def __get_db_effect(self): + return self._db_effect + def __set_db_effect(self, effect): + 
self._db_effect = effect + self.is_dirty = True + db_effect = property(__get_db_effect, __set_db_effect) + def db_add_effect(self, effect): + self._db_effect = effect + def db_change_effect(self, effect): + self._db_effect = effect + def db_delete_effect(self, effect): + if not self.is_new: + self.db_deleted_effect.append(self._db_effect) + self._db_effect = None + + def __get_db_role(self): + return self._db_role + def __set_db_role(self, role): + self._db_role = role + self.is_dirty = True + db_role = property(__get_db_role, __set_db_role) + def db_add_role(self, role): + self._db_role = role + def db_change_role(self, role): + self._db_role = role + def db_delete_role(self, role): + if not self.is_new: + self.db_deleted_role.append(self._db_role) + self._db_role = None + + def __get_db_cause(self): + return self._db_cause + def __set_db_cause(self, cause): + self._db_cause = cause + self.is_dirty = True + db_cause = property(__get_db_cause, __set_db_cause) + def db_add_cause(self, cause): + self._db_cause = cause + def db_change_cause(self, cause): + self._db_cause = cause + def db_delete_cause(self, cause): + if not self.is_new: + self.db_deleted_cause.append(self._db_cause) + self._db_cause = None + + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def __get_db_starts(self): + return self._db_starts + def __set_db_starts(self, starts): + self._db_starts = starts + self.is_dirty = True + 
db_starts = property(__get_db_starts, __set_db_starts) + def db_get_starts(self): + return self._db_starts + def db_add_start(self, start): + self.is_dirty = True + self._db_starts.append(start) + def db_change_start(self, start): + self.is_dirty = True + self._db_starts.append(start) + def db_delete_start(self, start): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_start(self, key): + return None + + def __get_db_ends(self): + return self._db_ends + def __set_db_ends(self, ends): + self._db_ends = ends + self.is_dirty = True + db_ends = property(__get_db_ends, __set_db_ends) + def db_get_ends(self): + return self._db_ends + def db_add_end(self, end): + self.is_dirty = True + self._db_ends.append(end) + def db_change_end(self, end): + self.is_dirty = True + self._db_ends.append(end) + def db_delete_end(self, end): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_end(self, key): + return None + + + +class DBOpmAgentId(object): + + vtType = 'opm_agent_id' + + def __init__(self, id=None): + self._db_id = id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmAgentId.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmAgentId(id=self._db_id) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_id') and ('opm_agent', self._db_id) in id_remap: + cp._db_id = id_remap[('opm_agent', self._db_id)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmAgentId() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + 
class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + + +class DBGroupExec(object): + + vtType = 'group_exec' + + def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, group_name=None, group_type=None, completed=None, error=None, machine_id=None, annotations=None): + self.db_deleted_item_execs = [] + self.db_item_execs_id_index = {} + if item_execs is None: + self._db_item_execs = [] + else: + self._db_item_execs = item_execs + for v in self._db_item_execs: + self.db_item_execs_id_index[v.db_id] = v + self._db_id = id + self._db_ts_start = ts_start + self._db_ts_end = ts_end + self._db_cached = cached + self._db_module_id = module_id + self._db_group_name = group_name + self._db_group_type = group_type + self._db_completed = completed + self._db_error = error + self._db_machine_id = machine_id + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.is_dirty = True + self.is_new 
= True + + def __copy__(self): + return DBGroupExec.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBGroupExec(id=self._db_id, + ts_start=self._db_ts_start, + ts_end=self._db_ts_end, + cached=self._db_cached, + module_id=self._db_module_id, + group_name=self._db_group_name, + group_type=self._db_group_type, + completed=self._db_completed, + error=self._db_error, + machine_id=self._db_machine_id) + if self._db_item_execs is None: + cp._db_item_execs = [] + else: + cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap: + cp._db_module_id = id_remap[('module', self._db_module_id)] + if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap: + cp._db_machine_id = id_remap[('machine', self._db_machine_id)] + + # recreate indices and set flags + cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBGroupExec() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'item_execs' in class_dict: + res = class_dict['item_execs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_item_exec(obj) + elif hasattr(old_obj, 
'db_item_execs') and old_obj.db_item_execs is not None: + for obj in old_obj.db_item_execs: + if obj.vtType == 'module_exec': + new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict)) + elif obj.vtType == 'group_exec': + new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict)) + elif obj.vtType == 'loop_exec': + new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'): + for obj in old_obj.db_deleted_item_execs: + if obj.vtType == 'module_exec': + n_obj = DBModuleExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + elif obj.vtType == 'group_exec': + n_obj = DBGroupExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + elif obj.vtType == 'loop_exec': + n_obj = DBLoopExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'ts_start' in class_dict: + res = class_dict['ts_start'](old_obj, trans_dict) + new_obj.db_ts_start = res + elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None: + new_obj.db_ts_start = old_obj.db_ts_start + if 'ts_end' in class_dict: + res = class_dict['ts_end'](old_obj, trans_dict) + new_obj.db_ts_end = res + elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None: + new_obj.db_ts_end = old_obj.db_ts_end + if 'cached' in class_dict: + res = class_dict['cached'](old_obj, trans_dict) + new_obj.db_cached = res + elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None: + new_obj.db_cached = old_obj.db_cached + if 'module_id' in class_dict: + res = class_dict['module_id'](old_obj, trans_dict) + new_obj.db_module_id = res + elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None: + 
new_obj.db_module_id = old_obj.db_module_id + if 'group_name' in class_dict: + res = class_dict['group_name'](old_obj, trans_dict) + new_obj.db_group_name = res + elif hasattr(old_obj, 'db_group_name') and old_obj.db_group_name is not None: + new_obj.db_group_name = old_obj.db_group_name + if 'group_type' in class_dict: + res = class_dict['group_type'](old_obj, trans_dict) + new_obj.db_group_type = res + elif hasattr(old_obj, 'db_group_type') and old_obj.db_group_type is not None: + new_obj.db_group_type = old_obj.db_group_type + if 'completed' in class_dict: + res = class_dict['completed'](old_obj, trans_dict) + new_obj.db_completed = res + elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None: + new_obj.db_completed = old_obj.db_completed + if 'error' in class_dict: + res = class_dict['error'](old_obj, trans_dict) + new_obj.db_error = res + elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None: + new_obj.db_error = old_obj.db_error + if 'machine_id' in class_dict: + res = class_dict['machine_id'](old_obj, trans_dict) + new_obj.db_machine_id = res + elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None: + new_obj.db_machine_id = old_obj.db_machine_id + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = 
[] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_item_execs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_item_exec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_item_execs) + if remove: + self.db_deleted_annotations = [] + self.db_deleted_item_execs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_item_execs: + if child.has_changes(): + return True + return False + def __get_db_item_execs(self): + return self._db_item_execs + def __set_db_item_execs(self, item_execs): + self._db_item_execs = item_execs + self.is_dirty = True + db_item_execs = property(__get_db_item_execs, __set_db_item_execs) + def db_get_item_execs(self): + return self._db_item_execs + def db_add_item_exec(self, item_exec): + self.is_dirty = True + self._db_item_execs.append(item_exec) + self.db_item_execs_id_index[item_exec.db_id] = item_exec + def db_change_item_exec(self, item_exec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == item_exec.db_id: + self._db_item_execs[i] = item_exec + found = True + break + if not found: + self._db_item_execs.append(item_exec) + self.db_item_execs_id_index[item_exec.db_id] = item_exec + def db_delete_item_exec(self, item_exec): + self.is_dirty = True + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == item_exec.db_id: + if not 
self._db_item_execs[i].is_new: + self.db_deleted_item_execs.append(self._db_item_execs[i]) + del self._db_item_execs[i] + break + del self.db_item_execs_id_index[item_exec.db_id] + def db_get_item_exec(self, key): + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == key: + return self._db_item_execs[i] + return None + def db_get_item_exec_by_id(self, key): + return self.db_item_execs_id_index[key] + def db_has_item_exec_with_id(self, key): + return key in self.db_item_execs_id_index + + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_ts_start(self): + return self._db_ts_start + def __set_db_ts_start(self, ts_start): + self._db_ts_start = ts_start + self.is_dirty = True + db_ts_start = property(__get_db_ts_start, __set_db_ts_start) + def db_add_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_change_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_delete_ts_start(self, ts_start): + self._db_ts_start = None + + def __get_db_ts_end(self): + return self._db_ts_end + def __set_db_ts_end(self, ts_end): + self._db_ts_end = ts_end + self.is_dirty = True + db_ts_end = property(__get_db_ts_end, __set_db_ts_end) + def db_add_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_change_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_delete_ts_end(self, ts_end): + self._db_ts_end = None + + def __get_db_cached(self): + return self._db_cached + def __set_db_cached(self, cached): + self._db_cached = cached + self.is_dirty = True + db_cached = property(__get_db_cached, __set_db_cached) + def db_add_cached(self, cached): + self._db_cached = cached + def db_change_cached(self, cached): + self._db_cached = cached + def 
db_delete_cached(self, cached): + self._db_cached = None + + def __get_db_module_id(self): + return self._db_module_id + def __set_db_module_id(self, module_id): + self._db_module_id = module_id + self.is_dirty = True + db_module_id = property(__get_db_module_id, __set_db_module_id) + def db_add_module_id(self, module_id): + self._db_module_id = module_id + def db_change_module_id(self, module_id): + self._db_module_id = module_id + def db_delete_module_id(self, module_id): + self._db_module_id = None + + def __get_db_group_name(self): + return self._db_group_name + def __set_db_group_name(self, group_name): + self._db_group_name = group_name + self.is_dirty = True + db_group_name = property(__get_db_group_name, __set_db_group_name) + def db_add_group_name(self, group_name): + self._db_group_name = group_name + def db_change_group_name(self, group_name): + self._db_group_name = group_name + def db_delete_group_name(self, group_name): + self._db_group_name = None + + def __get_db_group_type(self): + return self._db_group_type + def __set_db_group_type(self, group_type): + self._db_group_type = group_type + self.is_dirty = True + db_group_type = property(__get_db_group_type, __set_db_group_type) + def db_add_group_type(self, group_type): + self._db_group_type = group_type + def db_change_group_type(self, group_type): + self._db_group_type = group_type + def db_delete_group_type(self, group_type): + self._db_group_type = None + + def __get_db_completed(self): + return self._db_completed + def __set_db_completed(self, completed): + self._db_completed = completed + self.is_dirty = True + db_completed = property(__get_db_completed, __set_db_completed) + def db_add_completed(self, completed): + self._db_completed = completed + def db_change_completed(self, completed): + self._db_completed = completed + def db_delete_completed(self, completed): + self._db_completed = None + + def __get_db_error(self): + return self._db_error + def __set_db_error(self, error): + 
self._db_error = error + self.is_dirty = True + db_error = property(__get_db_error, __set_db_error) + def db_add_error(self, error): + self._db_error = error + def db_change_error(self, error): + self._db_error = error + def db_delete_error(self, error): + self._db_error = None + + def __get_db_machine_id(self): + return self._db_machine_id + def __set_db_machine_id(self, machine_id): + self._db_machine_id = machine_id + self.is_dirty = True + db_machine_id = property(__get_db_machine_id, __set_db_machine_id) + def db_add_machine_id(self, machine_id): + self._db_machine_id = machine_id + def db_change_machine_id(self, machine_id): + self._db_machine_id = machine_id + def db_delete_machine_id(self, machine_id): + self._db_machine_id = None + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + def db_get_annotation(self, key): + 
for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + + def getPrimaryKey(self): + return self._db_id + +class DBOpmTime(object): + + vtType = 'opm_time' + + def __init__(self, no_later_than=None, no_earlier_than=None, clock_id=None): + self._db_no_later_than = no_later_than + self._db_no_earlier_than = no_earlier_than + self._db_clock_id = clock_id + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmTime.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmTime(no_later_than=self._db_no_later_than, + no_earlier_than=self._db_no_earlier_than, + clock_id=self._db_clock_id) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmTime() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'no_later_than' in class_dict: + res = class_dict['no_later_than'](old_obj, trans_dict) + new_obj.db_no_later_than = res + elif hasattr(old_obj, 'db_no_later_than') and old_obj.db_no_later_than is not None: + new_obj.db_no_later_than = old_obj.db_no_later_than + if 'no_earlier_than' in class_dict: + res = class_dict['no_earlier_than'](old_obj, trans_dict) + new_obj.db_no_earlier_than = res + elif hasattr(old_obj, 'db_no_earlier_than') and old_obj.db_no_earlier_than is not 
None: + new_obj.db_no_earlier_than = old_obj.db_no_earlier_than + if 'clock_id' in class_dict: + res = class_dict['clock_id'](old_obj, trans_dict) + new_obj.db_clock_id = res + elif hasattr(old_obj, 'db_clock_id') and old_obj.db_clock_id is not None: + new_obj.db_clock_id = old_obj.db_clock_id + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_no_later_than(self): + return self._db_no_later_than + def __set_db_no_later_than(self, no_later_than): + self._db_no_later_than = no_later_than + self.is_dirty = True + db_no_later_than = property(__get_db_no_later_than, __set_db_no_later_than) + def db_add_no_later_than(self, no_later_than): + self._db_no_later_than = no_later_than + def db_change_no_later_than(self, no_later_than): + self._db_no_later_than = no_later_than + def db_delete_no_later_than(self, no_later_than): + self._db_no_later_than = None + + def __get_db_no_earlier_than(self): + return self._db_no_earlier_than + def __set_db_no_earlier_than(self, no_earlier_than): + self._db_no_earlier_than = no_earlier_than + self.is_dirty = True + db_no_earlier_than = property(__get_db_no_earlier_than, __set_db_no_earlier_than) + def db_add_no_earlier_than(self, no_earlier_than): + self._db_no_earlier_than = no_earlier_than + def db_change_no_earlier_than(self, no_earlier_than): + self._db_no_earlier_than = no_earlier_than + def db_delete_no_earlier_than(self, no_earlier_than): + self._db_no_earlier_than = None + + def __get_db_clock_id(self): + return self._db_clock_id + def __set_db_clock_id(self, clock_id): + self._db_clock_id = clock_id + self.is_dirty = True + db_clock_id = property(__get_db_clock_id, __set_db_clock_id) + def 
db_add_clock_id(self, clock_id): + self._db_clock_id = clock_id + def db_change_clock_id(self, clock_id): + self._db_clock_id = clock_id + def db_delete_clock_id(self, clock_id): + self._db_clock_id = None + + + +class DBPackage(object): + + vtType = 'package' + + def __init__(self, id=None, name=None, identifier=None, codepath=None, load_configuration=None, version=None, description=None, module_descriptors=None): + self._db_id = id + self._db_name = name + self._db_identifier = identifier + self._db_codepath = codepath + self._db_load_configuration = load_configuration + self._db_version = version + self._db_description = description + self.db_deleted_module_descriptors = [] + self.db_module_descriptors_id_index = {} + self.db_module_descriptors_name_index = {} + if module_descriptors is None: + self._db_module_descriptors = [] + else: + self._db_module_descriptors = module_descriptors + for v in self._db_module_descriptors: + self.db_module_descriptors_id_index[v.db_id] = v + self.db_module_descriptors_name_index[(v.db_name,v.db_namespace,v.db_version)] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBPackage.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBPackage(id=self._db_id, + name=self._db_name, + identifier=self._db_identifier, + codepath=self._db_codepath, + load_configuration=self._db_load_configuration, + version=self._db_version, + description=self._db_description) + if self._db_module_descriptors is None: + cp._db_module_descriptors = [] + else: + cp._db_module_descriptors = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_descriptors] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_module_descriptors_id_index = 
dict((v.db_id, v) for v in cp._db_module_descriptors) + cp.db_module_descriptors_name_index = dict(((v.db_name,v.db_namespace,v.db_version), v) for v in cp._db_module_descriptors) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBPackage() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'identifier' in class_dict: + res = class_dict['identifier'](old_obj, trans_dict) + new_obj.db_identifier = res + elif hasattr(old_obj, 'db_identifier') and old_obj.db_identifier is not None: + new_obj.db_identifier = old_obj.db_identifier + if 'codepath' in class_dict: + res = class_dict['codepath'](old_obj, trans_dict) + new_obj.db_codepath = res + elif hasattr(old_obj, 'db_codepath') and old_obj.db_codepath is not None: + new_obj.db_codepath = old_obj.db_codepath + if 'load_configuration' in class_dict: + res = class_dict['load_configuration'](old_obj, trans_dict) + new_obj.db_load_configuration = res + elif hasattr(old_obj, 'db_load_configuration') and old_obj.db_load_configuration is not None: + new_obj.db_load_configuration = old_obj.db_load_configuration + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'description' in class_dict: + res = class_dict['description'](old_obj, trans_dict) + 
new_obj.db_description = res + elif hasattr(old_obj, 'db_description') and old_obj.db_description is not None: + new_obj.db_description = old_obj.db_description + if 'module_descriptors' in class_dict: + res = class_dict['module_descriptors'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_module_descriptor(obj) + elif hasattr(old_obj, 'db_module_descriptors') and old_obj.db_module_descriptors is not None: + for obj in old_obj.db_module_descriptors: + new_obj.db_add_module_descriptor(DBModuleDescriptor.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_module_descriptors') and hasattr(new_obj, 'db_deleted_module_descriptors'): + for obj in old_obj.db_deleted_module_descriptors: + n_obj = DBModuleDescriptor.update_version(obj, trans_dict) + new_obj.db_deleted_module_descriptors.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_module_descriptors: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_module_descriptor(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_module_descriptors) + if remove: + self.db_deleted_module_descriptors = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_module_descriptors: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def 
__get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_identifier(self): + return self._db_identifier + def __set_db_identifier(self, identifier): + self._db_identifier = identifier + self.is_dirty = True + db_identifier = property(__get_db_identifier, __set_db_identifier) + def db_add_identifier(self, identifier): + self._db_identifier = identifier + def db_change_identifier(self, identifier): + self._db_identifier = identifier + def db_delete_identifier(self, identifier): + self._db_identifier = None + + def __get_db_codepath(self): + return self._db_codepath + def __set_db_codepath(self, codepath): + self._db_codepath = codepath + self.is_dirty = True + db_codepath = property(__get_db_codepath, __set_db_codepath) + def db_add_codepath(self, codepath): + self._db_codepath = codepath + def db_change_codepath(self, codepath): + self._db_codepath = codepath + def db_delete_codepath(self, codepath): + self._db_codepath = None + + def __get_db_load_configuration(self): + return self._db_load_configuration + def __set_db_load_configuration(self, load_configuration): + self._db_load_configuration = load_configuration + self.is_dirty = True + db_load_configuration = property(__get_db_load_configuration, __set_db_load_configuration) + def db_add_load_configuration(self, load_configuration): + self._db_load_configuration = load_configuration + def db_change_load_configuration(self, load_configuration): + self._db_load_configuration = load_configuration + def db_delete_load_configuration(self, load_configuration): + self._db_load_configuration = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + 
self.is_dirty = True + db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_description(self): + return self._db_description + def __set_db_description(self, description): + self._db_description = description + self.is_dirty = True + db_description = property(__get_db_description, __set_db_description) + def db_add_description(self, description): + self._db_description = description + def db_change_description(self, description): + self._db_description = description + def db_delete_description(self, description): + self._db_description = None + + def __get_db_module_descriptors(self): + return self._db_module_descriptors + def __set_db_module_descriptors(self, module_descriptors): + self._db_module_descriptors = module_descriptors + self.is_dirty = True + db_module_descriptors = property(__get_db_module_descriptors, __set_db_module_descriptors) + def db_get_module_descriptors(self): + return self._db_module_descriptors + def db_add_module_descriptor(self, module_descriptor): + self.is_dirty = True + self._db_module_descriptors.append(module_descriptor) + self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor + self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor + def db_change_module_descriptor(self, module_descriptor): + self.is_dirty = True + found = False + for i in xrange(len(self._db_module_descriptors)): + if self._db_module_descriptors[i].db_id == module_descriptor.db_id: + self._db_module_descriptors[i] = module_descriptor + found = True + break + if not found: + self._db_module_descriptors.append(module_descriptor) + self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor + 
self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor + def db_delete_module_descriptor(self, module_descriptor): + self.is_dirty = True + for i in xrange(len(self._db_module_descriptors)): + if self._db_module_descriptors[i].db_id == module_descriptor.db_id: + if not self._db_module_descriptors[i].is_new: + self.db_deleted_module_descriptors.append(self._db_module_descriptors[i]) + del self._db_module_descriptors[i] + break + del self.db_module_descriptors_id_index[module_descriptor.db_id] + del self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] + def db_get_module_descriptor(self, key): + for i in xrange(len(self._db_module_descriptors)): + if self._db_module_descriptors[i].db_id == key: + return self._db_module_descriptors[i] + return None + def db_get_module_descriptor_by_id(self, key): + return self.db_module_descriptors_id_index[key] + def db_has_module_descriptor_with_id(self, key): + return key in self.db_module_descriptors_id_index + def db_get_module_descriptor_by_name(self, key): + return self.db_module_descriptors_name_index[key] + def db_has_module_descriptor_with_name(self, key): + return key in self.db_module_descriptors_name_index + + def getPrimaryKey(self): + return self._db_id + +class DBWorkflowExec(object): + + vtType = 'workflow_exec' + + def __init__(self, item_execs=None, id=None, user=None, ip=None, session=None, vt_version=None, ts_start=None, ts_end=None, parent_id=None, parent_type=None, parent_version=None, completed=None, name=None, annotations=None, machines=None): + self.db_deleted_item_execs = [] + self.db_item_execs_id_index = {} + if item_execs is None: + self._db_item_execs = [] + else: + self._db_item_execs = item_execs + for v in self._db_item_execs: + self.db_item_execs_id_index[v.db_id] = v + self._db_id = id + self._db_user = user + self._db_ip = ip + 
self._db_session = session + self._db_vt_version = vt_version + self._db_ts_start = ts_start + self._db_ts_end = ts_end + self._db_parent_id = parent_id + self._db_parent_type = parent_type + self._db_parent_version = parent_version + self._db_completed = completed + self._db_name = name + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_deleted_machines = [] + self.db_machines_id_index = {} + if machines is None: + self._db_machines = [] + else: + self._db_machines = machines + for v in self._db_machines: + self.db_machines_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBWorkflowExec.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBWorkflowExec(id=self._db_id, + user=self._db_user, + ip=self._db_ip, + session=self._db_session, + vt_version=self._db_vt_version, + ts_start=self._db_ts_start, + ts_end=self._db_ts_end, + parent_id=self._db_parent_id, + parent_type=self._db_parent_type, + parent_version=self._db_parent_version, + completed=self._db_completed, + name=self._db_name) + if self._db_item_execs is None: + cp._db_item_execs = [] + else: + cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + if self._db_machines is None: + cp._db_machines = [] + else: + cp._db_machines = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_machines] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + 
cp.db_id = new_id + + # recreate indices and set flags + cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_machines_id_index = dict((v.db_id, v) for v in cp._db_machines) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBWorkflowExec() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'item_execs' in class_dict: + res = class_dict['item_execs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_item_exec(obj) + elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None: + for obj in old_obj.db_item_execs: + if obj.vtType == 'module_exec': + new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict)) + elif obj.vtType == 'group_exec': + new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict)) + elif obj.vtType == 'loop_exec': + new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'): + for obj in old_obj.db_deleted_item_execs: + if obj.vtType == 'module_exec': + n_obj = DBModuleExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + elif obj.vtType == 'group_exec': + n_obj = DBGroupExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + elif obj.vtType == 'loop_exec': + n_obj = DBLoopExec.update_version(obj, trans_dict) + new_obj.db_deleted_item_execs.append(n_obj) + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'user' in class_dict: + res = class_dict['user'](old_obj, trans_dict) + 
new_obj.db_user = res + elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None: + new_obj.db_user = old_obj.db_user + if 'ip' in class_dict: + res = class_dict['ip'](old_obj, trans_dict) + new_obj.db_ip = res + elif hasattr(old_obj, 'db_ip') and old_obj.db_ip is not None: + new_obj.db_ip = old_obj.db_ip + if 'session' in class_dict: + res = class_dict['session'](old_obj, trans_dict) + new_obj.db_session = res + elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None: + new_obj.db_session = old_obj.db_session + if 'vt_version' in class_dict: + res = class_dict['vt_version'](old_obj, trans_dict) + new_obj.db_vt_version = res + elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None: + new_obj.db_vt_version = old_obj.db_vt_version + if 'ts_start' in class_dict: + res = class_dict['ts_start'](old_obj, trans_dict) + new_obj.db_ts_start = res + elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None: + new_obj.db_ts_start = old_obj.db_ts_start + if 'ts_end' in class_dict: + res = class_dict['ts_end'](old_obj, trans_dict) + new_obj.db_ts_end = res + elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None: + new_obj.db_ts_end = old_obj.db_ts_end + if 'parent_id' in class_dict: + res = class_dict['parent_id'](old_obj, trans_dict) + new_obj.db_parent_id = res + elif hasattr(old_obj, 'db_parent_id') and old_obj.db_parent_id is not None: + new_obj.db_parent_id = old_obj.db_parent_id + if 'parent_type' in class_dict: + res = class_dict['parent_type'](old_obj, trans_dict) + new_obj.db_parent_type = res + elif hasattr(old_obj, 'db_parent_type') and old_obj.db_parent_type is not None: + new_obj.db_parent_type = old_obj.db_parent_type + if 'parent_version' in class_dict: + res = class_dict['parent_version'](old_obj, trans_dict) + new_obj.db_parent_version = res + elif hasattr(old_obj, 'db_parent_version') and old_obj.db_parent_version is not None: + new_obj.db_parent_version = old_obj.db_parent_version + if 
'completed' in class_dict: + res = class_dict['completed'](old_obj, trans_dict) + new_obj.db_completed = res + elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None: + new_obj.db_completed = old_obj.db_completed + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + if 'machines' in class_dict: + res = class_dict['machines'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_machine(obj) + elif hasattr(old_obj, 'db_machines') and old_obj.db_machines is not None: + for obj in old_obj.db_machines: + new_obj.db_add_machine(DBMachine.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_machines') and hasattr(new_obj, 'db_deleted_machines'): + for obj in old_obj.db_deleted_machines: + n_obj = DBMachine.update_version(obj, trans_dict) + new_obj.db_deleted_machines.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in 
self.db_machines: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_machine(child) + to_del = [] + for child in self.db_item_execs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_item_exec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_machines) + children.extend(self.db_deleted_item_execs) + if remove: + self.db_deleted_annotations = [] + self.db_deleted_machines = [] + self.db_deleted_item_execs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_machines: + if child.has_changes(): + return True + for child in self._db_item_execs: + if child.has_changes(): + return True + return False + def __get_db_item_execs(self): + return self._db_item_execs + def __set_db_item_execs(self, item_execs): + self._db_item_execs = item_execs + self.is_dirty = True + db_item_execs = property(__get_db_item_execs, __set_db_item_execs) + def db_get_item_execs(self): + return self._db_item_execs + def db_add_item_exec(self, item_exec): + self.is_dirty = True + self._db_item_execs.append(item_exec) + self.db_item_execs_id_index[item_exec.db_id] = item_exec + def db_change_item_exec(self, item_exec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == item_exec.db_id: + self._db_item_execs[i] = item_exec + found = True + break + if not found: + self._db_item_execs.append(item_exec) + self.db_item_execs_id_index[item_exec.db_id] = item_exec + def db_delete_item_exec(self, item_exec): + self.is_dirty = True 
+ for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == item_exec.db_id: + if not self._db_item_execs[i].is_new: + self.db_deleted_item_execs.append(self._db_item_execs[i]) + del self._db_item_execs[i] + break + del self.db_item_execs_id_index[item_exec.db_id] + def db_get_item_exec(self, key): + for i in xrange(len(self._db_item_execs)): + if self._db_item_execs[i].db_id == key: + return self._db_item_execs[i] + return None + def db_get_item_exec_by_id(self, key): + return self.db_item_execs_id_index[key] + def db_has_item_exec_with_id(self, key): + return key in self.db_item_execs_id_index + + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_user(self): + return self._db_user + def __set_db_user(self, user): + self._db_user = user + self.is_dirty = True + db_user = property(__get_db_user, __set_db_user) + def db_add_user(self, user): + self._db_user = user + def db_change_user(self, user): + self._db_user = user + def db_delete_user(self, user): + self._db_user = None + + def __get_db_ip(self): + return self._db_ip + def __set_db_ip(self, ip): + self._db_ip = ip + self.is_dirty = True + db_ip = property(__get_db_ip, __set_db_ip) + def db_add_ip(self, ip): + self._db_ip = ip + def db_change_ip(self, ip): + self._db_ip = ip + def db_delete_ip(self, ip): + self._db_ip = None + + def __get_db_session(self): + return self._db_session + def __set_db_session(self, session): + self._db_session = session + self.is_dirty = True + db_session = property(__get_db_session, __set_db_session) + def db_add_session(self, session): + self._db_session = session + def db_change_session(self, session): + self._db_session = session + def db_delete_session(self, session): + self._db_session = 
None + + def __get_db_vt_version(self): + return self._db_vt_version + def __set_db_vt_version(self, vt_version): + self._db_vt_version = vt_version + self.is_dirty = True + db_vt_version = property(__get_db_vt_version, __set_db_vt_version) + def db_add_vt_version(self, vt_version): + self._db_vt_version = vt_version + def db_change_vt_version(self, vt_version): + self._db_vt_version = vt_version + def db_delete_vt_version(self, vt_version): + self._db_vt_version = None + + def __get_db_ts_start(self): + return self._db_ts_start + def __set_db_ts_start(self, ts_start): + self._db_ts_start = ts_start + self.is_dirty = True + db_ts_start = property(__get_db_ts_start, __set_db_ts_start) + def db_add_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_change_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_delete_ts_start(self, ts_start): + self._db_ts_start = None + + def __get_db_ts_end(self): + return self._db_ts_end + def __set_db_ts_end(self, ts_end): + self._db_ts_end = ts_end + self.is_dirty = True + db_ts_end = property(__get_db_ts_end, __set_db_ts_end) + def db_add_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_change_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_delete_ts_end(self, ts_end): + self._db_ts_end = None + + def __get_db_parent_id(self): + return self._db_parent_id + def __set_db_parent_id(self, parent_id): + self._db_parent_id = parent_id + self.is_dirty = True + db_parent_id = property(__get_db_parent_id, __set_db_parent_id) + def db_add_parent_id(self, parent_id): + self._db_parent_id = parent_id + def db_change_parent_id(self, parent_id): + self._db_parent_id = parent_id + def db_delete_parent_id(self, parent_id): + self._db_parent_id = None + + def __get_db_parent_type(self): + return self._db_parent_type + def __set_db_parent_type(self, parent_type): + self._db_parent_type = parent_type + self.is_dirty = True + db_parent_type = property(__get_db_parent_type, __set_db_parent_type) + def 
db_add_parent_type(self, parent_type): + self._db_parent_type = parent_type + def db_change_parent_type(self, parent_type): + self._db_parent_type = parent_type + def db_delete_parent_type(self, parent_type): + self._db_parent_type = None + + def __get_db_parent_version(self): + return self._db_parent_version + def __set_db_parent_version(self, parent_version): + self._db_parent_version = parent_version + self.is_dirty = True + db_parent_version = property(__get_db_parent_version, __set_db_parent_version) + def db_add_parent_version(self, parent_version): + self._db_parent_version = parent_version + def db_change_parent_version(self, parent_version): + self._db_parent_version = parent_version + def db_delete_parent_version(self, parent_version): + self._db_parent_version = None + + def __get_db_completed(self): + return self._db_completed + def __set_db_completed(self, completed): + self._db_completed = completed + self.is_dirty = True + db_completed = property(__get_db_completed, __set_db_completed) + def db_add_completed(self, completed): + self._db_completed = completed + def db_change_completed(self, completed): + self._db_completed = completed + def db_delete_completed(self, completed): + self._db_completed = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + 
self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + + def __get_db_machines(self): + return self._db_machines + def __set_db_machines(self, machines): + self._db_machines = machines + self.is_dirty = True + db_machines = property(__get_db_machines, __set_db_machines) + def db_get_machines(self): + return self._db_machines + def db_add_machine(self, machine): + self.is_dirty = True + self._db_machines.append(machine) + self.db_machines_id_index[machine.db_id] = machine + def db_change_machine(self, machine): + self.is_dirty = True + found = False + for i in xrange(len(self._db_machines)): + if self._db_machines[i].db_id == machine.db_id: + self._db_machines[i] = machine + found = True + break + if not found: + self._db_machines.append(machine) + self.db_machines_id_index[machine.db_id] = machine + def db_delete_machine(self, machine): + self.is_dirty = 
True + for i in xrange(len(self._db_machines)): + if self._db_machines[i].db_id == machine.db_id: + if not self._db_machines[i].is_new: + self.db_deleted_machines.append(self._db_machines[i]) + del self._db_machines[i] + break + del self.db_machines_id_index[machine.db_id] + def db_get_machine(self, key): + for i in xrange(len(self._db_machines)): + if self._db_machines[i].db_id == key: + return self._db_machines[i] + return None + def db_get_machine_by_id(self, key): + return self.db_machines_id_index[key] + def db_has_machine_with_id(self, key): + return key in self.db_machines_id_index + + def getPrimaryKey(self): + return self._db_id + +class DBParameterExploration(object): + + vtType = 'parameter_exploration' + + def __init__(self, id=None, action_id=None, name=None, date=None, user=None, dims=None, layout=None, functions=None): + self._db_id = id + self._db_action_id = action_id + self._db_name = name + self._db_date = date + self._db_user = user + self._db_dims = dims + self._db_layout = layout + self.db_deleted_functions = [] + self.db_functions_id_index = {} + if functions is None: + self._db_functions = [] + else: + self._db_functions = functions + for v in self._db_functions: + self.db_functions_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBParameterExploration.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBParameterExploration(id=self._db_id, + action_id=self._db_action_id, + name=self._db_name, + date=self._db_date, + user=self._db_user, + dims=self._db_dims, + layout=self._db_layout) + if self._db_functions is None: + cp._db_functions = [] + else: + cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = 
new_id + cp.db_id = new_id + if hasattr(self, 'db_action_id') and ('action', self._db_action_id) in id_remap: + cp._db_action_id = id_remap[('action', self._db_action_id)] + + # recreate indices and set flags + cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBParameterExploration() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'action_id' in class_dict: + res = class_dict['action_id'](old_obj, trans_dict) + new_obj.db_action_id = res + elif hasattr(old_obj, 'db_action_id') and old_obj.db_action_id is not None: + new_obj.db_action_id = old_obj.db_action_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'date' in class_dict: + res = class_dict['date'](old_obj, trans_dict) + new_obj.db_date = res + elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None: + new_obj.db_date = old_obj.db_date + if 'user' in class_dict: + res = class_dict['user'](old_obj, trans_dict) + new_obj.db_user = res + elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None: + new_obj.db_user = old_obj.db_user + if 'dims' in class_dict: + res = class_dict['dims'](old_obj, trans_dict) + new_obj.db_dims = res + elif hasattr(old_obj, 'db_dims') and old_obj.db_dims is not None: + new_obj.db_dims = old_obj.db_dims + if 'layout' in class_dict: + res = class_dict['layout'](old_obj, trans_dict) + new_obj.db_layout = res + elif hasattr(old_obj, 
'db_layout') and old_obj.db_layout is not None: + new_obj.db_layout = old_obj.db_layout + if 'functions' in class_dict: + res = class_dict['functions'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_function(obj) + elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None: + for obj in old_obj.db_functions: + new_obj.db_add_function(DBPEFunction.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'): + for obj in old_obj.db_deleted_functions: + n_obj = DBPEFunction.update_version(obj, trans_dict) + new_obj.db_deleted_functions.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_functions: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_function(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_functions) + if remove: + self.db_deleted_functions = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_functions: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_action_id(self): + return self._db_action_id + def __set_db_action_id(self, action_id): + self._db_action_id = action_id + self.is_dirty = True + db_action_id = property(__get_db_action_id, __set_db_action_id) + def 
db_add_action_id(self, action_id): + self._db_action_id = action_id + def db_change_action_id(self, action_id): + self._db_action_id = action_id + def db_delete_action_id(self, action_id): + self._db_action_id = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_date(self): + return self._db_date + def __set_db_date(self, date): + self._db_date = date + self.is_dirty = True + db_date = property(__get_db_date, __set_db_date) + def db_add_date(self, date): + self._db_date = date + def db_change_date(self, date): + self._db_date = date + def db_delete_date(self, date): + self._db_date = None + + def __get_db_user(self): + return self._db_user + def __set_db_user(self, user): + self._db_user = user + self.is_dirty = True + db_user = property(__get_db_user, __set_db_user) + def db_add_user(self, user): + self._db_user = user + def db_change_user(self, user): + self._db_user = user + def db_delete_user(self, user): + self._db_user = None + + def __get_db_dims(self): + return self._db_dims + def __set_db_dims(self, dims): + self._db_dims = dims + self.is_dirty = True + db_dims = property(__get_db_dims, __set_db_dims) + def db_add_dims(self, dims): + self._db_dims = dims + def db_change_dims(self, dims): + self._db_dims = dims + def db_delete_dims(self, dims): + self._db_dims = None + + def __get_db_layout(self): + return self._db_layout + def __set_db_layout(self, layout): + self._db_layout = layout + self.is_dirty = True + db_layout = property(__get_db_layout, __set_db_layout) + def db_add_layout(self, layout): + self._db_layout = layout + def db_change_layout(self, layout): + self._db_layout = layout + def db_delete_layout(self, layout): + self._db_layout 
= None + + def __get_db_functions(self): + return self._db_functions + def __set_db_functions(self, functions): + self._db_functions = functions + self.is_dirty = True + db_functions = property(__get_db_functions, __set_db_functions) + def db_get_functions(self): + return self._db_functions + def db_add_function(self, function): + self.is_dirty = True + self._db_functions.append(function) + self.db_functions_id_index[function.db_id] = function + def db_change_function(self, function): + self.is_dirty = True + found = False + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == function.db_id: + self._db_functions[i] = function + found = True + break + if not found: + self._db_functions.append(function) + self.db_functions_id_index[function.db_id] = function + def db_delete_function(self, function): + self.is_dirty = True + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == function.db_id: + if not self._db_functions[i].is_new: + self.db_deleted_functions.append(self._db_functions[i]) + del self._db_functions[i] + break + del self.db_functions_id_index[function.db_id] + def db_get_function(self, key): + for i in xrange(len(self._db_functions)): + if self._db_functions[i].db_id == key: + return self._db_functions[i] + return None + def db_get_function_by_id(self, key): + return self.db_functions_id_index[key] + def db_has_function_with_id(self, key): + return key in self.db_functions_id_index + + def getPrimaryKey(self): + return self._db_id + +class DBLoopExec(object): + + vtType = 'loop_exec' + + def __init__(self, id=None, ts_start=None, ts_end=None, loop_iterations=None): + self._db_id = id + self._db_ts_start = ts_start + self._db_ts_end = ts_end + self.db_deleted_loop_iterations = [] + self.db_loop_iterations_id_index = {} + if loop_iterations is None: + self._db_loop_iterations = [] + else: + self._db_loop_iterations = loop_iterations + for v in self._db_loop_iterations: + 
self.db_loop_iterations_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBLoopExec.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBLoopExec(id=self._db_id, + ts_start=self._db_ts_start, + ts_end=self._db_ts_end) + if self._db_loop_iterations is None: + cp._db_loop_iterations = [] + else: + cp._db_loop_iterations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_iterations] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_loop_iterations_id_index = dict((v.db_id, v) for v in cp._db_loop_iterations) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBLoopExec() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'ts_start' in class_dict: + res = class_dict['ts_start'](old_obj, trans_dict) + new_obj.db_ts_start = res + elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None: + new_obj.db_ts_start = old_obj.db_ts_start + if 'ts_end' in class_dict: + res = class_dict['ts_end'](old_obj, trans_dict) + new_obj.db_ts_end = res + elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None: + new_obj.db_ts_end = old_obj.db_ts_end + if 'loop_iterations' in class_dict: + res = class_dict['loop_iterations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_loop_iteration(obj) + elif hasattr(old_obj, 
'db_loop_iterations') and old_obj.db_loop_iterations is not None: + for obj in old_obj.db_loop_iterations: + new_obj.db_add_loop_iteration(DBLoopIteration.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_loop_iterations') and hasattr(new_obj, 'db_deleted_loop_iterations'): + for obj in old_obj.db_deleted_loop_iterations: + n_obj = DBLoopIteration.update_version(obj, trans_dict) + new_obj.db_deleted_loop_iterations.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_loop_iterations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_loop_iteration(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_loop_iterations) + if remove: + self.db_deleted_loop_iterations = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_loop_iterations: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_ts_start(self): + return self._db_ts_start + def __set_db_ts_start(self, ts_start): + self._db_ts_start = ts_start + self.is_dirty = True + db_ts_start = property(__get_db_ts_start, __set_db_ts_start) + def db_add_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_change_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_delete_ts_start(self, ts_start): + 
self._db_ts_start = None + + def __get_db_ts_end(self): + return self._db_ts_end + def __set_db_ts_end(self, ts_end): + self._db_ts_end = ts_end + self.is_dirty = True + db_ts_end = property(__get_db_ts_end, __set_db_ts_end) + def db_add_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_change_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_delete_ts_end(self, ts_end): + self._db_ts_end = None + + def __get_db_loop_iterations(self): + return self._db_loop_iterations + def __set_db_loop_iterations(self, loop_iterations): + self._db_loop_iterations = loop_iterations + self.is_dirty = True + db_loop_iterations = property(__get_db_loop_iterations, __set_db_loop_iterations) + def db_get_loop_iterations(self): + return self._db_loop_iterations + def db_add_loop_iteration(self, loop_iteration): + self.is_dirty = True + self._db_loop_iterations.append(loop_iteration) + self.db_loop_iterations_id_index[loop_iteration.db_id] = loop_iteration + def db_change_loop_iteration(self, loop_iteration): + self.is_dirty = True + found = False + for i in xrange(len(self._db_loop_iterations)): + if self._db_loop_iterations[i].db_id == loop_iteration.db_id: + self._db_loop_iterations[i] = loop_iteration + found = True + break + if not found: + self._db_loop_iterations.append(loop_iteration) + self.db_loop_iterations_id_index[loop_iteration.db_id] = loop_iteration + def db_delete_loop_iteration(self, loop_iteration): + self.is_dirty = True + for i in xrange(len(self._db_loop_iterations)): + if self._db_loop_iterations[i].db_id == loop_iteration.db_id: + if not self._db_loop_iterations[i].is_new: + self.db_deleted_loop_iterations.append(self._db_loop_iterations[i]) + del self._db_loop_iterations[i] + break + del self.db_loop_iterations_id_index[loop_iteration.db_id] + def db_get_loop_iteration(self, key): + for i in xrange(len(self._db_loop_iterations)): + if self._db_loop_iterations[i].db_id == key: + return self._db_loop_iterations[i] + return None + def 
db_get_loop_iteration_by_id(self, key): + return self.db_loop_iterations_id_index[key] + def db_has_loop_iteration_with_id(self, key): + return key in self.db_loop_iterations_id_index + + def getPrimaryKey(self): + return self._db_id + +class DBOpmWasTriggeredBy(object): + + vtType = 'opm_was_triggered_by' + + def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None): + self.db_deleted_effect = [] + self._db_effect = effect + self.db_deleted_role = [] + self._db_role = role + self.db_deleted_cause = [] + self._db_cause = cause + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.db_deleted_opm_times = [] + if opm_times is None: + self._db_opm_times = [] + else: + self._db_opm_times = opm_times + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmWasTriggeredBy.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmWasTriggeredBy() + if self._db_effect is not None: + cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap) + if self._db_role is not None: + cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap) + if self._db_cause is not None: + cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + if self._db_opm_times is None: + cp._db_opm_times = [] + else: + cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + 
@staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmWasTriggeredBy() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'effect' in class_dict: + res = class_dict['effect'](old_obj, trans_dict) + new_obj.db_effect = res + elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None: + obj = old_obj.db_effect + new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'): + for obj in old_obj.db_deleted_effect: + n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict) + new_obj.db_deleted_effect.append(n_obj) + if 'role' in class_dict: + res = class_dict['role'](old_obj, trans_dict) + new_obj.db_role = res + elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None: + obj = old_obj.db_role + new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'): + for obj in old_obj.db_deleted_role: + n_obj = DBOpmRole.update_version(obj, trans_dict) + new_obj.db_deleted_role.append(n_obj) + if 'cause' in class_dict: + res = class_dict['cause'](old_obj, trans_dict) + new_obj.db_cause = res + elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None: + obj = old_obj.db_cause + new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'): + for obj in old_obj.db_deleted_cause: + n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict) + new_obj.db_deleted_cause.append(n_obj) + if 'accounts' in class_dict: + res = class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + 
new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + if 'opm_times' in class_dict: + res = class_dict['opm_times'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_opm_time(obj) + elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None: + for obj in old_obj.db_opm_times: + new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'): + for obj in old_obj.db_deleted_opm_times: + n_obj = DBOpmTime.update_version(obj, trans_dict) + new_obj.db_deleted_opm_times.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_effect is not None: + children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_effect = None + if self._db_role is not None: + children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_role = None + if self._db_cause is not None: + children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_cause = None + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + to_del = [] + for child in self.db_opm_times: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_opm_time(child) + children.append((self, 
parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_effect) + children.extend(self.db_deleted_role) + children.extend(self.db_deleted_cause) + children.extend(self.db_deleted_accounts) + children.extend(self.db_deleted_opm_times) + if remove: + self.db_deleted_effect = [] + self.db_deleted_role = [] + self.db_deleted_cause = [] + self.db_deleted_accounts = [] + self.db_deleted_opm_times = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_effect is not None and self._db_effect.has_changes(): + return True + if self._db_role is not None and self._db_role.has_changes(): + return True + if self._db_cause is not None and self._db_cause.has_changes(): + return True + for child in self._db_accounts: + if child.has_changes(): + return True + for child in self._db_opm_times: + if child.has_changes(): + return True + return False + def __get_db_effect(self): + return self._db_effect + def __set_db_effect(self, effect): + self._db_effect = effect + self.is_dirty = True + db_effect = property(__get_db_effect, __set_db_effect) + def db_add_effect(self, effect): + self._db_effect = effect + def db_change_effect(self, effect): + self._db_effect = effect + def db_delete_effect(self, effect): + if not self.is_new: + self.db_deleted_effect.append(self._db_effect) + self._db_effect = None + + def __get_db_role(self): + return self._db_role + def __set_db_role(self, role): + self._db_role = role + self.is_dirty = True + db_role = property(__get_db_role, __set_db_role) + def db_add_role(self, role): + self._db_role = role + def db_change_role(self, role): + self._db_role = role + def db_delete_role(self, role): + if not self.is_new: + self.db_deleted_role.append(self._db_role) + self._db_role = None + + def __get_db_cause(self): + return self._db_cause + def __set_db_cause(self, cause): + self._db_cause = cause + self.is_dirty = True + db_cause = 
property(__get_db_cause, __set_db_cause) + def db_add_cause(self, cause): + self._db_cause = cause + def db_change_cause(self, cause): + self._db_cause = cause + def db_delete_cause(self, cause): + if not self.is_new: + self.db_deleted_cause.append(self._db_cause) + self._db_cause = None + + def __get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def __get_db_opm_times(self): + return self._db_opm_times + def __set_db_opm_times(self, opm_times): + self._db_opm_times = opm_times + self.is_dirty = True + db_opm_times = property(__get_db_opm_times, __set_db_opm_times) + def db_get_opm_times(self): + return self._db_opm_times + def db_add_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_change_opm_time(self, opm_time): + self.is_dirty = True + self._db_opm_times.append(opm_time) + def db_delete_opm_time(self, opm_time): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_opm_time(self, key): + return None + + + +class DBControlParameter(object): + + vtType = 'controlParameter' + + def __init__(self, id=None, name=None, value=None): + self._db_id = id + self._db_name = name + self._db_value = value + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBControlParameter.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBControlParameter(id=self._db_id, + 
name=self._db_name, + value=self._db_value) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBControlParameter() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'name' in class_dict: + res = class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_name(self): + return self._db_name + def 
class DBMashupActionAnnotation(object):
    """Key/value annotation attached to a mashup action.

    Exposes the generated persistence API: ``db_*`` properties with
    add/change/delete helpers, copy support, and schema-version
    translation via :meth:`update_version`.
    """

    vtType = 'mashup_actionAnnotation'

    def __init__(self, id=None, key=None, value=None, action_id=None,
                 date=None, user=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self._db_action_id = action_id
        self._db_date = date
        self._db_user = user
        # a freshly built object is considered dirty until persisted
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBMashupActionAnnotation.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this annotation.

        With ``new_ids`` a fresh id is drawn from ``id_scope`` and the
        old->new mapping is recorded in ``id_remap``; the ``action_id``
        foreign key is re-pointed if its target was already remapped.
        """
        dup = DBMashupActionAnnotation(id=self._db_id,
                                       key=self._db_key,
                                       value=self._db_value,
                                       action_id=self._db_action_id,
                                       date=self._db_date,
                                       user=self._db_user)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remapped_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remapped_type, self.db_id)] = fresh
            dup.db_id = fresh
            action_key = ('mashup_action', self._db_action_id)
            if action_key in id_remap:
                dup._db_action_id = id_remap[action_key]
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` to the current schema version.

        ``trans_dict`` may supply per-field conversion callables keyed by
        class name; fields without an override are copied when present.
        """
        if new_obj is None:
            new_obj = DBMashupActionAnnotation()
        overrides = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'key', 'value', 'action_id', 'date', 'user'):
            attr = 'db_' + field
            if field in overrides:
                setattr(new_obj, attr, overrides[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: the only (object, parent_type, parent_id) triple
        # is this annotation itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def _get_key(self):
        return self._db_key
    def _set_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(_get_key, _set_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None

    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    def _get_action_id(self):
        return self._db_action_id
    def _set_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(_get_action_id, _set_action_id)
    def db_add_action_id(self, action_id):
        self._db_action_id = action_id
    def db_change_action_id(self, action_id):
        self._db_action_id = action_id
    def db_delete_action_id(self, action_id):
        self._db_action_id = None

    def _get_date(self):
        return self._db_date
    def _set_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(_get_date, _set_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None

    def _get_user(self):
        return self._db_user
    def _set_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(_get_user, _set_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None

    def getPrimaryKey(self):
        return self._db_id
class DBConnection(object):
    """Auto-generated persistence object for a workflow connection.

    Holds a list of DBPort children and maintains two lookup indices over
    them: one keyed by port id and one keyed by port type.
    NOTE(review): the type index keeps at most one port per type value, so
    it presumably assumes one 'source' and one 'destination' port per
    connection -- confirm against the schema.
    """

    vtType = 'connection'

    def __init__(self, id=None, ports=None):
        self._db_id = id
        # ports removed via db_delete_port are parked here until flushed
        self.db_deleted_ports = []
        self.db_ports_id_index = {}
        self.db_ports_type_index = {}
        if ports is None:
            self._db_ports = []
        else:
            self._db_ports = ports
            for v in self._db_ports:
                self.db_ports_id_index[v.db_id] = v
                self.db_ports_type_index[v.db_type] = v
        # freshly built objects start out dirty and new
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBConnection.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this connection and its ports.

        With new_ids, fresh ids are drawn from id_scope and the old->new
        mapping is recorded in id_remap.
        """
        cp = DBConnection(id=self._db_id)
        if self._db_ports is None:
            cp._db_ports = []
        else:
            cp._db_ports = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ports]

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id

        # recreate indices and set flags
        cp.db_ports_id_index = dict((v.db_id, v) for v in cp._db_ports)
        cp.db_ports_type_index = dict((v.db_type, v) for v in cp._db_ports)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a new-version copy of old_obj, applying any per-field
        translation callables registered in trans_dict (keyed by class
        name); fields without an override are copied when present."""
        if new_obj is None:
            new_obj = DBConnection()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ports' in class_dict:
            res = class_dict['ports'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_port(obj)
        elif hasattr(old_obj, 'db_ports') and old_obj.db_ports is not None:
            for obj in old_obj.db_ports:
                new_obj.db_add_port(DBPort.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_ports') and hasattr(new_obj, 'db_deleted_ports'):
            for obj in old_obj.db_deleted_ports:
                n_obj = DBPort.update_version(obj, trans_dict)
                new_obj.db_deleted_ports.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for every port
        child and finally for self; with orphan=True the children are
        detached from this connection as they are collected."""
        children = []
        to_del = []
        for child in self.db_ports:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_port(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return the ports deleted since the last flush; remove=True also
        clears the pending-deletion list."""
        children = []
        children.extend(self.db_deleted_ports)
        if remove:
            self.db_deleted_ports = []
        return children

    def has_changes(self):
        """True if this connection or any of its ports is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_ports:
            if child.has_changes():
                return True
        return False

    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_ports(self):
        return self._db_ports
    def __set_db_ports(self, ports):
        self._db_ports = ports
        self.is_dirty = True
    db_ports = property(__get_db_ports, __set_db_ports)
    def db_get_ports(self):
        return self._db_ports
    def db_add_port(self, port):
        self.is_dirty = True
        self._db_ports.append(port)
        self.db_ports_id_index[port.db_id] = port
        self.db_ports_type_index[port.db_type] = port
    def db_change_port(self, port):
        # replace the port with the matching id, or append if absent
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == port.db_id:
                self._db_ports[i] = port
                found = True
                break
        if not found:
            self._db_ports.append(port)
        self.db_ports_id_index[port.db_id] = port
        self.db_ports_type_index[port.db_type] = port
    def db_delete_port(self, port):
        # NOTE(review): if no port with port.db_id is present, the index
        # deletions below raise KeyError -- callers apparently only pass
        # ports known to be in the list; confirm.
        self.is_dirty = True
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == port.db_id:
                if not self._db_ports[i].is_new:
                    self.db_deleted_ports.append(self._db_ports[i])
                del self._db_ports[i]
                break
        del self.db_ports_id_index[port.db_id]
        del self.db_ports_type_index[port.db_type]
    def db_get_port(self, key):
        # linear scan by id; db_get_port_by_id is the indexed variant
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == key:
                return self._db_ports[i]
        return None
    def db_get_port_by_id(self, key):
        return self.db_ports_id_index[key]
    def db_has_port_with_id(self, key):
        return key in self.db_ports_id_index
    def db_get_port_by_type(self, key):
        return self.db_ports_type_index[key]
    def db_has_port_with_type(self, key):
        return key in self.db_ports_type_index

    def getPrimaryKey(self):
        return self._db_id
class DBOpmProcess(object):
    """Auto-generated OPM provenance 'process' node.

    Carries an id, a single DBOpmProcessValue child (``value``), and a
    list of account references (``accounts``).  Accounts are non-keyed:
    they cannot be individually deleted or looked up.
    """

    vtType = 'opm_process'

    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        # value children removed via db_delete_value are parked here
        self.db_deleted_value = []
        self._db_value = value
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        # freshly built objects start out dirty and new
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcess.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this process, its value child and its accounts.

        With new_ids, a fresh id is drawn from id_scope and the old->new
        mapping is recorded in id_remap.
        """
        cp = DBOpmProcess(id=self._db_id)
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id

        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a new-version copy of old_obj, applying any per-field
        translation callables registered in trans_dict (keyed by class
        name); fields without an override are copied when present."""
        if new_obj is None:
            new_obj = DBOpmProcess()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            obj = old_obj.db_value
            new_obj.db_add_value(DBOpmProcessValue.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for obj in old_obj.db_deleted_value:
                n_obj = DBOpmProcessValue.update_version(obj, trans_dict)
                new_obj.db_deleted_value.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for the value
        child, the accounts, and finally self.
        NOTE(review): orphan=True detaches the children, but
        db_delete_account always raises for non-keyed accounts, so orphan
        collection with accounts present would fail -- confirm callers."""
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return children deleted since the last flush; remove=True also
        clears the pending-deletion lists."""
        children = []
        children.extend(self.db_deleted_value)
        children.extend(self.db_deleted_accounts)
        if remove:
            self.db_deleted_value = []
            self.db_deleted_accounts = []
        return children

    def has_changes(self):
        """True if this object, its value child, or any account is dirty."""
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        return False

    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # keep the old child for deletion bookkeeping unless never persisted
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None

    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # NOTE(review): accounts are non-keyed, so 'change' appends rather
        # than replacing -- this matches the generator's pattern; confirm.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # non-keyed children cannot be individually deleted
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # non-keyed children cannot be looked up by key
        return None

    def getPrimaryKey(self):
        return self._db_id
class DBIsPartOf(object):
    """Lightweight wrapper holding a single PROV 'is_part_of' reference."""

    vtType = 'is_part_of'

    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        # a freshly built object is considered dirty until persisted
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBIsPartOf.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; flags are preserved unless new_ids is set.

        NOTE(review): this class defines no db_id attribute, so the
        new_ids branch raises AttributeError at self.db_id -- identical to
        the generated original; confirm upstream never copies DBIsPartOf
        with new_ids=True.
        """
        dup = DBIsPartOf(prov_ref=self._db_prov_ref)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remapped_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remapped_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version; trans_dict may
        supply a 'prov_ref' conversion callable keyed by class name."""
        if new_obj is None:
            new_obj = DBIsPartOf()
        overrides = trans_dict.get(new_obj.__class__.__name__, {})
        if 'prov_ref' in overrides:
            new_obj.db_prov_ref = overrides['prov_ref'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_ref', None) is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: only triple is itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    def _get_prov_ref(self):
        return self._db_prov_ref
    def _set_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(_get_prov_ref, _set_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBPEFunction(object):
    """Auto-generated persistence object for a parameter-exploration
    function: which module/port is explored and with what parameters.

    Maintains an id-keyed index over its DBPEParameter children.
    """

    vtType = 'pe_function'

    def __init__(self, id=None, module_id=None, port_name=None, is_alias=None, parameters=None):
        self._db_id = id
        self._db_module_id = module_id
        self._db_port_name = port_name
        self._db_is_alias = is_alias
        # parameters removed via db_delete_parameter are parked here
        self.db_deleted_parameters = []
        self.db_parameters_id_index = {}
        if parameters is None:
            self._db_parameters = []
        else:
            self._db_parameters = parameters
            for v in self._db_parameters:
                self.db_parameters_id_index[v.db_id] = v
        # freshly built objects start out dirty and new
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBPEFunction.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this function and its parameters.

        With new_ids, fresh ids are drawn from id_scope, the old->new
        mapping is recorded in id_remap, and the module_id foreign key is
        re-pointed if its target module was already remapped.
        """
        cp = DBPEFunction(id=self._db_id,
                          module_id=self._db_module_id,
                          port_name=self._db_port_name,
                          is_alias=self._db_is_alias)
        if self._db_parameters is None:
            cp._db_parameters = []
        else:
            cp._db_parameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameters]

        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
                cp._db_module_id = id_remap[('module', self._db_module_id)]

        # recreate indices and set flags
        cp.db_parameters_id_index = dict((v.db_id, v) for v in cp._db_parameters)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a new-version copy of old_obj, applying any per-field
        translation callables registered in trans_dict (keyed by class
        name); fields without an override are copied when present."""
        if new_obj is None:
            new_obj = DBPEFunction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'module_id' in class_dict:
            res = class_dict['module_id'](old_obj, trans_dict)
            new_obj.db_module_id = res
        elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
            new_obj.db_module_id = old_obj.db_module_id
        if 'port_name' in class_dict:
            res = class_dict['port_name'](old_obj, trans_dict)
            new_obj.db_port_name = res
        elif hasattr(old_obj, 'db_port_name') and old_obj.db_port_name is not None:
            new_obj.db_port_name = old_obj.db_port_name
        if 'is_alias' in class_dict:
            res = class_dict['is_alias'](old_obj, trans_dict)
            new_obj.db_is_alias = res
        elif hasattr(old_obj, 'db_is_alias') and old_obj.db_is_alias is not None:
            new_obj.db_is_alias = old_obj.db_is_alias
        if 'parameters' in class_dict:
            res = class_dict['parameters'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_parameter(obj)
        elif hasattr(old_obj, 'db_parameters') and old_obj.db_parameters is not None:
            for obj in old_obj.db_parameters:
                new_obj.db_add_parameter(DBPEParameter.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
            for obj in old_obj.db_deleted_parameters:
                n_obj = DBPEParameter.update_version(obj, trans_dict)
                new_obj.db_deleted_parameters.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for every
        parameter child and finally for self; with orphan=True the
        children are detached as they are collected."""
        children = []
        to_del = []
        for child in self.db_parameters:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_parameter(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return parameters deleted since the last flush; remove=True
        also clears the pending-deletion list."""
        children = []
        children.extend(self.db_deleted_parameters)
        if remove:
            self.db_deleted_parameters = []
        return children

    def has_changes(self):
        """True if this function or any parameter child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_parameters:
            if child.has_changes():
                return True
        return False

    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_module_id(self):
        return self._db_module_id
    def __set_db_module_id(self, module_id):
        self._db_module_id = module_id
        self.is_dirty = True
    db_module_id = property(__get_db_module_id, __set_db_module_id)
    def db_add_module_id(self, module_id):
        self._db_module_id = module_id
    def db_change_module_id(self, module_id):
        self._db_module_id = module_id
    def db_delete_module_id(self, module_id):
        self._db_module_id = None

    def __get_db_port_name(self):
        return self._db_port_name
    def __set_db_port_name(self, port_name):
        self._db_port_name = port_name
        self.is_dirty = True
    db_port_name = property(__get_db_port_name, __set_db_port_name)
    def db_add_port_name(self, port_name):
        self._db_port_name = port_name
    def db_change_port_name(self, port_name):
        self._db_port_name = port_name
    def db_delete_port_name(self, port_name):
        self._db_port_name = None

    def __get_db_is_alias(self):
        return self._db_is_alias
    def __set_db_is_alias(self, is_alias):
        self._db_is_alias = is_alias
        self.is_dirty = True
    db_is_alias = property(__get_db_is_alias, __set_db_is_alias)
    def db_add_is_alias(self, is_alias):
        self._db_is_alias = is_alias
    def db_change_is_alias(self, is_alias):
        self._db_is_alias = is_alias
    def db_delete_is_alias(self, is_alias):
        self._db_is_alias = None

    def __get_db_parameters(self):
        return self._db_parameters
    def __set_db_parameters(self, parameters):
        self._db_parameters = parameters
        self.is_dirty = True
    db_parameters = property(__get_db_parameters, __set_db_parameters)
    def db_get_parameters(self):
        return self._db_parameters
    def db_add_parameter(self, parameter):
        self.is_dirty = True
        self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_change_parameter(self, parameter):
        # replace the parameter with the matching id, or append if absent
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == parameter.db_id:
                self._db_parameters[i] = parameter
                found = True
                break
        if not found:
            self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_delete_parameter(self, parameter):
        # NOTE(review): if no parameter with parameter.db_id is present,
        # the index deletion below raises KeyError -- matches the
        # generator's pattern; confirm callers.
        self.is_dirty = True
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == parameter.db_id:
                if not self._db_parameters[i].is_new:
                    self.db_deleted_parameters.append(self._db_parameters[i])
                del self._db_parameters[i]
                break
        del self.db_parameters_id_index[parameter.db_id]
    def db_get_parameter(self, key):
        # linear scan by id; db_get_parameter_by_id is the indexed variant
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == key:
                return self._db_parameters[i]
        return None
    def db_get_parameter_by_id(self, key):
        return self.db_parameters_id_index[key]
    def db_has_parameter_with_id(self, key):
        return key in self.db_parameters_id_index

    def getPrimaryKey(self):
        return self._db_id
class DBOpmProcessValue(object):
    """OPM 'process value' wrapper: holds one execution record (a
    module/group/loop exec) as its single child value."""

    vtType = 'opm_process_value'

    def __init__(self, value=None):
        # values removed via db_delete_value are parked here until flushed
        self.db_deleted_value = []
        self._db_value = value
        # a freshly built object is considered dirty until persisted
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcessValue.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this wrapper and deep-copy its value child.

        NOTE(review): this class defines no db_id attribute, so the
        new_ids branch raises AttributeError at self.db_id -- identical to
        the generated original; confirm upstream never uses new_ids here.
        """
        dup = DBOpmProcessValue()
        if self._db_value is not None:
            dup._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remapped_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remapped_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version, dispatching
        the value child on its vtType; trans_dict may supply a 'value'
        conversion callable keyed by class name."""
        if new_obj is None:
            new_obj = DBOpmProcessValue()
        overrides = trans_dict.get(new_obj.__class__.__name__, {})
        if 'value' in overrides:
            new_obj.db_value = overrides['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            child = old_obj.db_value
            if child.vtType == 'module_exec':
                new_obj.db_add_value(DBModuleExec.update_version(child, trans_dict))
            elif child.vtType == 'group_exec':
                new_obj.db_add_value(DBGroupExec.update_version(child, trans_dict))
            elif child.vtType == 'loop_exec':
                new_obj.db_add_value(DBLoopExec.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for child in old_obj.db_deleted_value:
                if child.vtType == 'module_exec':
                    new_obj.db_deleted_value.append(DBModuleExec.update_version(child, trans_dict))
                elif child.vtType == 'group_exec':
                    new_obj.db_deleted_value.append(DBGroupExec.update_version(child, trans_dict))
                elif child.vtType == 'loop_exec':
                    new_obj.db_deleted_value.append(DBLoopExec.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for the value
        child (if any) and then self; orphan=True detaches the child."""
        triples = []
        if self._db_value is not None:
            triples += self._db_value.db_children((self.vtType, self.db_id), orphan, for_action)
            if orphan:
                self._db_value = None
        triples.append((self, parent[0], parent[1]))
        return triples

    def db_deleted_children(self, remove=False):
        gone = list(self.db_deleted_value)
        if remove:
            self.db_deleted_value = []
        return gone

    def has_changes(self):
        if self.is_dirty:
            return True
        return self._db_value is not None and self._db_value.has_changes()

    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # keep the old child for deletion bookkeeping unless never persisted
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
has_changes(self): + if self.is_dirty: + return True + if self._db_value is not None and self._db_value.has_changes(): + return True + return False + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + if not self.is_new: + self.db_deleted_value.append(self._db_value) + self._db_value = None + + + +class DBAction(object): + + vtType = 'action' + + def __init__(self, operations=None, id=None, prevId=None, date=None, session=None, user=None, annotations=None): + self.db_deleted_operations = [] + self.db_operations_id_index = {} + if operations is None: + self._db_operations = [] + else: + self._db_operations = operations + for v in self._db_operations: + self.db_operations_id_index[v.db_id] = v + self._db_id = id + self._db_prevId = prevId + self._db_date = date + self._db_session = session + self._db_user = user + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + self.db_annotations_key_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_annotations_key_index[v.db_key] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBAction.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBAction(id=self._db_id, + prevId=self._db_prevId, + date=self._db_date, + session=self._db_session, + user=self._db_user) + if self._db_operations is None: + cp._db_operations = [] + else: + cp._db_operations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_operations] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = 
[v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_prevId') and ('action', self._db_prevId) in id_remap: + cp._db_prevId = id_remap[('action', self._db_prevId)] + + # recreate indices and set flags + cp.db_operations_id_index = dict((v.db_id, v) for v in cp._db_operations) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBAction() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'operations' in class_dict: + res = class_dict['operations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_operation(obj) + elif hasattr(old_obj, 'db_operations') and old_obj.db_operations is not None: + for obj in old_obj.db_operations: + if obj.vtType == 'add': + new_obj.db_add_operation(DBAdd.update_version(obj, trans_dict)) + elif obj.vtType == 'delete': + new_obj.db_add_operation(DBDelete.update_version(obj, trans_dict)) + elif obj.vtType == 'change': + new_obj.db_add_operation(DBChange.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_operations') and hasattr(new_obj, 'db_deleted_operations'): + for obj in old_obj.db_deleted_operations: + if obj.vtType == 'add': + n_obj = DBAdd.update_version(obj, trans_dict) + new_obj.db_deleted_operations.append(n_obj) + elif obj.vtType == 'delete': + n_obj = DBDelete.update_version(obj, trans_dict) + new_obj.db_deleted_operations.append(n_obj) + elif obj.vtType == 
'change': + n_obj = DBChange.update_version(obj, trans_dict) + new_obj.db_deleted_operations.append(n_obj) + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'prevId' in class_dict: + res = class_dict['prevId'](old_obj, trans_dict) + new_obj.db_prevId = res + elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None: + new_obj.db_prevId = old_obj.db_prevId + if 'date' in class_dict: + res = class_dict['date'](old_obj, trans_dict) + new_obj.db_date = res + elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None: + new_obj.db_date = old_obj.db_date + if 'session' in class_dict: + res = class_dict['session'](old_obj, trans_dict) + new_obj.db_session = res + elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None: + new_obj.db_session = old_obj.db_session + if 'user' in class_dict: + res = class_dict['user'](old_obj, trans_dict) + new_obj.db_user = res + elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None: + new_obj.db_user = old_obj.db_user + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_annotations: + 
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_operations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_operation(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_operations) + if remove: + self.db_deleted_annotations = [] + self.db_deleted_operations = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_operations: + if child.has_changes(): + return True + return False + def __get_db_operations(self): + return self._db_operations + def __set_db_operations(self, operations): + self._db_operations = operations + self.is_dirty = True + db_operations = property(__get_db_operations, __set_db_operations) + def db_get_operations(self): + return self._db_operations + def db_add_operation(self, operation): + self.is_dirty = True + self._db_operations.append(operation) + self.db_operations_id_index[operation.db_id] = operation + def db_change_operation(self, operation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_operations)): + if self._db_operations[i].db_id == operation.db_id: + self._db_operations[i] = operation + found = True + break + if not found: + self._db_operations.append(operation) + self.db_operations_id_index[operation.db_id] = operation + def db_delete_operation(self, operation): + self.is_dirty = True + for i in xrange(len(self._db_operations)): + if self._db_operations[i].db_id == operation.db_id: + if not self._db_operations[i].is_new: + 
self.db_deleted_operations.append(self._db_operations[i]) + del self._db_operations[i] + break + del self.db_operations_id_index[operation.db_id] + def db_get_operation(self, key): + for i in xrange(len(self._db_operations)): + if self._db_operations[i].db_id == key: + return self._db_operations[i] + return None + def db_get_operation_by_id(self, key): + return self.db_operations_id_index[key] + def db_has_operation_with_id(self, key): + return key in self.db_operations_id_index + + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_prevId(self): + return self._db_prevId + def __set_db_prevId(self, prevId): + self._db_prevId = prevId + self.is_dirty = True + db_prevId = property(__get_db_prevId, __set_db_prevId) + def db_add_prevId(self, prevId): + self._db_prevId = prevId + def db_change_prevId(self, prevId): + self._db_prevId = prevId + def db_delete_prevId(self, prevId): + self._db_prevId = None + + def __get_db_date(self): + return self._db_date + def __set_db_date(self, date): + self._db_date = date + self.is_dirty = True + db_date = property(__get_db_date, __set_db_date) + def db_add_date(self, date): + self._db_date = date + def db_change_date(self, date): + self._db_date = date + def db_delete_date(self, date): + self._db_date = None + + def __get_db_session(self): + return self._db_session + def __set_db_session(self, session): + self._db_session = session + self.is_dirty = True + db_session = property(__get_db_session, __set_db_session) + def db_add_session(self, session): + self._db_session = session + def db_change_session(self, session): + self._db_session = session + def db_delete_session(self, session): + self._db_session = None + + def __get_db_user(self): + return self._db_user + 
def __set_db_user(self, user): + self._db_user = user + self.is_dirty = True + db_user = property(__get_db_user, __set_db_user) + def db_add_user(self, user): + self._db_user = user + def db_change_user(self, user): + self._db_user = user + def db_delete_user(self, user): + self._db_user = None + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + del self.db_annotations_key_index[annotation.db_key] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return 
key in self.db_annotations_id_index + def db_get_annotation_by_key(self, key): + return self.db_annotations_key_index[key] + def db_has_annotation_with_key(self, key): + return key in self.db_annotations_key_index + + def getPrimaryKey(self): + return self._db_id + +class DBOpmAgent(object): + + vtType = 'opm_agent' + + def __init__(self, id=None, value=None, accounts=None): + self._db_id = id + self._db_value = value + self.db_deleted_accounts = [] + if accounts is None: + self._db_accounts = [] + else: + self._db_accounts = accounts + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBOpmAgent.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBOpmAgent(id=self._db_id, + value=self._db_value) + if self._db_accounts is None: + cp._db_accounts = [] + else: + cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBOpmAgent() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'value' in class_dict: + res = class_dict['value'](old_obj, trans_dict) + new_obj.db_value = res + elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None: + new_obj.db_value = old_obj.db_value + if 'accounts' in class_dict: + res = 
class_dict['accounts'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_account(obj) + elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None: + for obj in old_obj.db_accounts: + new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'): + for obj in old_obj.db_deleted_accounts: + n_obj = DBOpmAccountId.update_version(obj, trans_dict) + new_obj.db_deleted_accounts.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_accounts: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_account(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_accounts) + if remove: + self.db_deleted_accounts = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_accounts: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_value(self): + return self._db_value + def __set_db_value(self, value): + self._db_value = value + self.is_dirty = True + db_value = property(__get_db_value, __set_db_value) + def db_add_value(self, value): + self._db_value = value + def db_change_value(self, value): + self._db_value = value + def db_delete_value(self, value): + self._db_value = None + + def 
__get_db_accounts(self): + return self._db_accounts + def __set_db_accounts(self, accounts): + self._db_accounts = accounts + self.is_dirty = True + db_accounts = property(__get_db_accounts, __set_db_accounts) + def db_get_accounts(self): + return self._db_accounts + def db_add_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_change_account(self, account): + self.is_dirty = True + self._db_accounts.append(account) + def db_delete_account(self, account): + self.is_dirty = True + raise Exception('Cannot delete a non-keyed object') + def db_get_account(self, key): + return None + + def getPrimaryKey(self): + return self._db_id + +class DBDelete(object): + + vtType = 'delete' + + def __init__(self, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None): + self._db_id = id + self._db_what = what + self._db_objectId = objectId + self._db_parentObjId = parentObjId + self._db_parentObjType = parentObjType + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBDelete.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBDelete(id=self._db_id, + what=self._db_what, + objectId=self._db_objectId, + parentObjId=self._db_parentObjId, + parentObjType=self._db_parentObjType) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap: + cp._db_objectId = id_remap[(self._db_what, self._db_objectId)] + if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap: + cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)] + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return 
cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBDelete() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'what' in class_dict: + res = class_dict['what'](old_obj, trans_dict) + new_obj.db_what = res + elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None: + new_obj.db_what = old_obj.db_what + if 'objectId' in class_dict: + res = class_dict['objectId'](old_obj, trans_dict) + new_obj.db_objectId = res + elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None: + new_obj.db_objectId = old_obj.db_objectId + if 'parentObjId' in class_dict: + res = class_dict['parentObjId'](old_obj, trans_dict) + new_obj.db_parentObjId = res + elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None: + new_obj.db_parentObjId = old_obj.db_parentObjId + if 'parentObjType' in class_dict: + res = class_dict['parentObjType'](old_obj, trans_dict) + new_obj.db_parentObjType = res + elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None: + new_obj.db_parentObjType = old_obj.db_parentObjType + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + return [(self, parent[0], parent[1])] + def db_deleted_children(self, remove=False): + children = [] + return children + def has_changes(self): + if self.is_dirty: + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = 
id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_what(self): + return self._db_what + def __set_db_what(self, what): + self._db_what = what + self.is_dirty = True + db_what = property(__get_db_what, __set_db_what) + def db_add_what(self, what): + self._db_what = what + def db_change_what(self, what): + self._db_what = what + def db_delete_what(self, what): + self._db_what = None + + def __get_db_objectId(self): + return self._db_objectId + def __set_db_objectId(self, objectId): + self._db_objectId = objectId + self.is_dirty = True + db_objectId = property(__get_db_objectId, __set_db_objectId) + def db_add_objectId(self, objectId): + self._db_objectId = objectId + def db_change_objectId(self, objectId): + self._db_objectId = objectId + def db_delete_objectId(self, objectId): + self._db_objectId = None + + def __get_db_parentObjId(self): + return self._db_parentObjId + def __set_db_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + self.is_dirty = True + db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId) + def db_add_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + def db_change_parentObjId(self, parentObjId): + self._db_parentObjId = parentObjId + def db_delete_parentObjId(self, parentObjId): + self._db_parentObjId = None + + def __get_db_parentObjType(self): + return self._db_parentObjType + def __set_db_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + self.is_dirty = True + db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType) + def db_add_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + def db_change_parentObjType(self, parentObjType): + self._db_parentObjType = parentObjType + def db_delete_parentObjType(self, parentObjType): + self._db_parentObjType = None + + def getPrimaryKey(self): + return self._db_id + +class DBProvAssociation(object): + + vtType = 'prov_association' + + def __init__(self, 
prov_activity=None, prov_agent=None, prov_plan=None, prov_role=None): + self.db_deleted_prov_activity = [] + self._db_prov_activity = prov_activity + self.db_deleted_prov_agent = [] + self._db_prov_agent = prov_agent + self.db_deleted_prov_plan = [] + self._db_prov_plan = prov_plan + self._db_prov_role = prov_role + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBProvAssociation.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBProvAssociation(prov_role=self._db_prov_role) + if self._db_prov_activity is not None: + cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap) + if self._db_prov_agent is not None: + cp._db_prov_agent = self._db_prov_agent.do_copy(new_ids, id_scope, id_remap) + if self._db_prov_plan is not None: + cp._db_prov_plan = self._db_prov_plan.do_copy(new_ids, id_scope, id_remap) + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBProvAssociation() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'prov_activity' in class_dict: + res = class_dict['prov_activity'](old_obj, trans_dict) + new_obj.db_prov_activity = res + elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None: + obj = old_obj.db_prov_activity + new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'): + for obj in old_obj.db_deleted_prov_activity: + 
n_obj = DBRefProvActivity.update_version(obj, trans_dict) + new_obj.db_deleted_prov_activity.append(n_obj) + if 'prov_agent' in class_dict: + res = class_dict['prov_agent'](old_obj, trans_dict) + new_obj.db_prov_agent = res + elif hasattr(old_obj, 'db_prov_agent') and old_obj.db_prov_agent is not None: + obj = old_obj.db_prov_agent + new_obj.db_add_prov_agent(DBRefProvAgent.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_agent') and hasattr(new_obj, 'db_deleted_prov_agent'): + for obj in old_obj.db_deleted_prov_agent: + n_obj = DBRefProvAgent.update_version(obj, trans_dict) + new_obj.db_deleted_prov_agent.append(n_obj) + if 'prov_plan' in class_dict: + res = class_dict['prov_plan'](old_obj, trans_dict) + new_obj.db_prov_plan = res + elif hasattr(old_obj, 'db_prov_plan') and old_obj.db_prov_plan is not None: + obj = old_obj.db_prov_plan + new_obj.db_add_prov_plan(DBRefProvPlan.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_prov_plan') and hasattr(new_obj, 'db_deleted_prov_plan'): + for obj in old_obj.db_deleted_prov_plan: + n_obj = DBRefProvPlan.update_version(obj, trans_dict) + new_obj.db_deleted_prov_plan.append(n_obj) + if 'prov_role' in class_dict: + res = class_dict['prov_role'](old_obj, trans_dict) + new_obj.db_prov_role = res + elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None: + new_obj.db_prov_role = old_obj.db_prov_role + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + if self._db_prov_activity is not None: + children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_activity = None + if self._db_prov_agent is not None: + children.extend(self._db_prov_agent.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_agent = None + if self._db_prov_plan is not 
None: + children.extend(self._db_prov_plan.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + self._db_prov_plan = None + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_prov_activity) + children.extend(self.db_deleted_prov_agent) + children.extend(self.db_deleted_prov_plan) + if remove: + self.db_deleted_prov_activity = [] + self.db_deleted_prov_agent = [] + self.db_deleted_prov_plan = [] + return children + def has_changes(self): + if self.is_dirty: + return True + if self._db_prov_activity is not None and self._db_prov_activity.has_changes(): + return True + if self._db_prov_agent is not None and self._db_prov_agent.has_changes(): + return True + if self._db_prov_plan is not None and self._db_prov_plan.has_changes(): + return True + return False + def __get_db_prov_activity(self): + return self._db_prov_activity + def __set_db_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + self.is_dirty = True + db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity) + def db_add_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + def db_change_prov_activity(self, prov_activity): + self._db_prov_activity = prov_activity + def db_delete_prov_activity(self, prov_activity): + if not self.is_new: + self.db_deleted_prov_activity.append(self._db_prov_activity) + self._db_prov_activity = None + + def __get_db_prov_agent(self): + return self._db_prov_agent + def __set_db_prov_agent(self, prov_agent): + self._db_prov_agent = prov_agent + self.is_dirty = True + db_prov_agent = property(__get_db_prov_agent, __set_db_prov_agent) + def db_add_prov_agent(self, prov_agent): + self._db_prov_agent = prov_agent + def db_change_prov_agent(self, prov_agent): + self._db_prov_agent = prov_agent + def db_delete_prov_agent(self, prov_agent): + if not self.is_new: + 
self.db_deleted_prov_agent.append(self._db_prov_agent) + self._db_prov_agent = None + + def __get_db_prov_plan(self): + return self._db_prov_plan + def __set_db_prov_plan(self, prov_plan): + self._db_prov_plan = prov_plan + self.is_dirty = True + db_prov_plan = property(__get_db_prov_plan, __set_db_prov_plan) + def db_add_prov_plan(self, prov_plan): + self._db_prov_plan = prov_plan + def db_change_prov_plan(self, prov_plan): + self._db_prov_plan = prov_plan + def db_delete_prov_plan(self, prov_plan): + if not self.is_new: + self.db_deleted_prov_plan.append(self._db_prov_plan) + self._db_prov_plan = None + + def __get_db_prov_role(self): + return self._db_prov_role + def __set_db_prov_role(self, prov_role): + self._db_prov_role = prov_role + self.is_dirty = True + db_prov_role = property(__get_db_prov_role, __set_db_prov_role) + def db_add_prov_role(self, prov_role): + self._db_prov_role = prov_role + def db_change_prov_role(self, prov_role): + self._db_prov_role = prov_role + def db_delete_prov_role(self, prov_role): + self._db_prov_role = None + + + +class DBVistrail(object): + + vtType = 'vistrail' + + def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, actions=None, tags=None, annotations=None, controlParameters=None, vistrailVariables=None, parameter_explorations=None, actionAnnotations=None): + self._db_id = id + self._db_entity_type = entity_type + self._db_version = version + self._db_name = name + self._db_last_modified = last_modified + self.db_deleted_actions = [] + self.db_actions_id_index = {} + if actions is None: + self._db_actions = [] + else: + self._db_actions = actions + for v in self._db_actions: + self.db_actions_id_index[v.db_id] = v + self.db_deleted_tags = [] + self.db_tags_id_index = {} + self.db_tags_name_index = {} + if tags is None: + self._db_tags = [] + else: + self._db_tags = tags + for v in self._db_tags: + self.db_tags_id_index[v.db_id] = v + self.db_tags_name_index[v.db_name] = v + 
self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + self.db_annotations_key_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_annotations_key_index[v.db_key] = v + self.db_deleted_controlParameters = [] + self.db_controlParameters_id_index = {} + self.db_controlParameters_name_index = {} + if controlParameters is None: + self._db_controlParameters = [] + else: + self._db_controlParameters = controlParameters + for v in self._db_controlParameters: + self.db_controlParameters_id_index[v.db_id] = v + self.db_controlParameters_name_index[v.db_name] = v + self.db_deleted_vistrailVariables = [] + self.db_vistrailVariables_name_index = {} + self.db_vistrailVariables_uuid_index = {} + if vistrailVariables is None: + self._db_vistrailVariables = [] + else: + self._db_vistrailVariables = vistrailVariables + for v in self._db_vistrailVariables: + self.db_vistrailVariables_name_index[v.db_name] = v + self.db_vistrailVariables_uuid_index[v.db_uuid] = v + self.db_deleted_parameter_explorations = [] + self.db_parameter_explorations_id_index = {} + if parameter_explorations is None: + self._db_parameter_explorations = [] + else: + self._db_parameter_explorations = parameter_explorations + for v in self._db_parameter_explorations: + self.db_parameter_explorations_id_index[v.db_id] = v + self.db_deleted_actionAnnotations = [] + self.db_actionAnnotations_id_index = {} + self.db_actionAnnotations_action_id_index = {} + self.db_actionAnnotations_key_index = {} + if actionAnnotations is None: + self._db_actionAnnotations = [] + else: + self._db_actionAnnotations = actionAnnotations + for v in self._db_actionAnnotations: + self.db_actionAnnotations_id_index[v.db_id] = v + self.db_actionAnnotations_action_id_index[(v.db_action_id,v.db_key)] = v + self.db_actionAnnotations_key_index[(v.db_key,v.db_value)] = v + self.is_dirty = 
True + self.is_new = True + + def __copy__(self): + return DBVistrail.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBVistrail(id=self._db_id, + entity_type=self._db_entity_type, + version=self._db_version, + name=self._db_name, + last_modified=self._db_last_modified) + if self._db_actions is None: + cp._db_actions = [] + else: + cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions] + if self._db_tags is None: + cp._db_tags = [] + else: + cp._db_tags = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_tags] + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + if self._db_controlParameters is None: + cp._db_controlParameters = [] + else: + cp._db_controlParameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_controlParameters] + if self._db_vistrailVariables is None: + cp._db_vistrailVariables = [] + else: + cp._db_vistrailVariables = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_vistrailVariables] + if self._db_parameter_explorations is None: + cp._db_parameter_explorations = [] + else: + cp._db_parameter_explorations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameter_explorations] + if self._db_actionAnnotations is None: + cp._db_actionAnnotations = [] + else: + cp._db_actionAnnotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actionAnnotations] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + + # recreate indices and set flags + cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions) + cp.db_tags_id_index = dict((v.db_id, v) for v in cp._db_tags) + cp.db_tags_name_index = dict((v.db_name, v) for v in 
cp._db_tags) + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations) + cp.db_controlParameters_id_index = dict((v.db_id, v) for v in cp._db_controlParameters) + cp.db_controlParameters_name_index = dict((v.db_name, v) for v in cp._db_controlParameters) + cp.db_vistrailVariables_name_index = dict((v.db_name, v) for v in cp._db_vistrailVariables) + cp.db_vistrailVariables_uuid_index = dict((v.db_uuid, v) for v in cp._db_vistrailVariables) + cp.db_parameter_explorations_id_index = dict((v.db_id, v) for v in cp._db_parameter_explorations) + cp.db_actionAnnotations_id_index = dict((v.db_id, v) for v in cp._db_actionAnnotations) + cp.db_actionAnnotations_action_id_index = dict(((v.db_action_id,v.db_key), v) for v in cp._db_actionAnnotations) + cp.db_actionAnnotations_key_index = dict(((v.db_key,v.db_value), v) for v in cp._db_actionAnnotations) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBVistrail() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'entity_type' in class_dict: + res = class_dict['entity_type'](old_obj, trans_dict) + new_obj.db_entity_type = res + elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None: + new_obj.db_entity_type = old_obj.db_entity_type + if 'version' in class_dict: + res = class_dict['version'](old_obj, trans_dict) + new_obj.db_version = res + elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None: + new_obj.db_version = old_obj.db_version + if 'name' in class_dict: + res = 
class_dict['name'](old_obj, trans_dict) + new_obj.db_name = res + elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None: + new_obj.db_name = old_obj.db_name + if 'last_modified' in class_dict: + res = class_dict['last_modified'](old_obj, trans_dict) + new_obj.db_last_modified = res + elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None: + new_obj.db_last_modified = old_obj.db_last_modified + if 'actions' in class_dict: + res = class_dict['actions'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_action(obj) + elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None: + for obj in old_obj.db_actions: + new_obj.db_add_action(DBAction.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'): + for obj in old_obj.db_deleted_actions: + n_obj = DBAction.update_version(obj, trans_dict) + new_obj.db_deleted_actions.append(n_obj) + if 'tags' in class_dict: + res = class_dict['tags'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_tag(obj) + elif hasattr(old_obj, 'db_tags') and old_obj.db_tags is not None: + for obj in old_obj.db_tags: + new_obj.db_add_tag(DBTag.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_tags') and hasattr(new_obj, 'db_deleted_tags'): + for obj in old_obj.db_deleted_tags: + n_obj = DBTag.update_version(obj, trans_dict) + new_obj.db_deleted_tags.append(n_obj) + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + 
new_obj.db_deleted_annotations.append(n_obj) + if 'controlParameters' in class_dict: + res = class_dict['controlParameters'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_controlParameter(obj) + elif hasattr(old_obj, 'db_controlParameters') and old_obj.db_controlParameters is not None: + for obj in old_obj.db_controlParameters: + new_obj.db_add_controlParameter(DBControlParameter.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_controlParameters') and hasattr(new_obj, 'db_deleted_controlParameters'): + for obj in old_obj.db_deleted_controlParameters: + n_obj = DBControlParameter.update_version(obj, trans_dict) + new_obj.db_deleted_controlParameters.append(n_obj) + if 'vistrailVariables' in class_dict: + res = class_dict['vistrailVariables'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_vistrailVariable(obj) + elif hasattr(old_obj, 'db_vistrailVariables') and old_obj.db_vistrailVariables is not None: + for obj in old_obj.db_vistrailVariables: + new_obj.db_add_vistrailVariable(DBVistrailVariable.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_vistrailVariables') and hasattr(new_obj, 'db_deleted_vistrailVariables'): + for obj in old_obj.db_deleted_vistrailVariables: + n_obj = DBVistrailVariable.update_version(obj, trans_dict) + new_obj.db_deleted_vistrailVariables.append(n_obj) + if 'parameter_explorations' in class_dict: + res = class_dict['parameter_explorations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_parameter_exploration(obj) + elif hasattr(old_obj, 'db_parameter_explorations') and old_obj.db_parameter_explorations is not None: + for obj in old_obj.db_parameter_explorations: + new_obj.db_add_parameter_exploration(DBParameterExploration.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_parameter_explorations') and hasattr(new_obj, 'db_deleted_parameter_explorations'): + for obj in old_obj.db_deleted_parameter_explorations: + n_obj = DBParameterExploration.update_version(obj, 
trans_dict) + new_obj.db_deleted_parameter_explorations.append(n_obj) + if 'actionAnnotations' in class_dict: + res = class_dict['actionAnnotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_actionAnnotation(obj) + elif hasattr(old_obj, 'db_actionAnnotations') and old_obj.db_actionAnnotations is not None: + for obj in old_obj.db_actionAnnotations: + new_obj.db_add_actionAnnotation(DBActionAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_actionAnnotations') and hasattr(new_obj, 'db_deleted_actionAnnotations'): + for obj in old_obj.db_deleted_actionAnnotations: + n_obj = DBActionAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_actionAnnotations.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_actions: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_action(child) + to_del = [] + for child in self.db_tags: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_tag(child) + to_del = [] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_controlParameters: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_controlParameter(child) + to_del = [] + for child in self.db_vistrailVariables: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child 
in to_del: + self.db_delete_vistrailVariable(child) + to_del = [] + for child in self.db_parameter_explorations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_parameter_exploration(child) + to_del = [] + for child in self.db_actionAnnotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_actionAnnotation(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_actions) + children.extend(self.db_deleted_tags) + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_controlParameters) + children.extend(self.db_deleted_vistrailVariables) + children.extend(self.db_deleted_parameter_explorations) + children.extend(self.db_deleted_actionAnnotations) + if remove: + self.db_deleted_actions = [] + self.db_deleted_tags = [] + self.db_deleted_annotations = [] + self.db_deleted_controlParameters = [] + self.db_deleted_vistrailVariables = [] + self.db_deleted_parameter_explorations = [] + self.db_deleted_actionAnnotations = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_actions: + if child.has_changes(): + return True + for child in self._db_tags: + if child.has_changes(): + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_controlParameters: + if child.has_changes(): + return True + for child in self._db_vistrailVariables: + if child.has_changes(): + return True + for child in self._db_parameter_explorations: + if child.has_changes(): + return True + for child in self._db_actionAnnotations: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def 
__set_db_id(self, id): + self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_entity_type(self): + return self._db_entity_type + def __set_db_entity_type(self, entity_type): + self._db_entity_type = entity_type + self.is_dirty = True + db_entity_type = property(__get_db_entity_type, __set_db_entity_type) + def db_add_entity_type(self, entity_type): + self._db_entity_type = entity_type + def db_change_entity_type(self, entity_type): + self._db_entity_type = entity_type + def db_delete_entity_type(self, entity_type): + self._db_entity_type = None + + def __get_db_version(self): + return self._db_version + def __set_db_version(self, version): + self._db_version = version + self.is_dirty = True + db_version = property(__get_db_version, __set_db_version) + def db_add_version(self, version): + self._db_version = version + def db_change_version(self, version): + self._db_version = version + def db_delete_version(self, version): + self._db_version = None + + def __get_db_name(self): + return self._db_name + def __set_db_name(self, name): + self._db_name = name + self.is_dirty = True + db_name = property(__get_db_name, __set_db_name) + def db_add_name(self, name): + self._db_name = name + def db_change_name(self, name): + self._db_name = name + def db_delete_name(self, name): + self._db_name = None + + def __get_db_last_modified(self): + return self._db_last_modified + def __set_db_last_modified(self, last_modified): + self._db_last_modified = last_modified + self.is_dirty = True + db_last_modified = property(__get_db_last_modified, __set_db_last_modified) + def db_add_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_change_last_modified(self, last_modified): + self._db_last_modified = last_modified + def db_delete_last_modified(self, 
last_modified): + self._db_last_modified = None + + def __get_db_actions(self): + return self._db_actions + def __set_db_actions(self, actions): + self._db_actions = actions + self.is_dirty = True + db_actions = property(__get_db_actions, __set_db_actions) + def db_get_actions(self): + return self._db_actions + def db_add_action(self, action): + self.is_dirty = True + self._db_actions.append(action) + self.db_actions_id_index[action.db_id] = action + def db_change_action(self, action): + self.is_dirty = True + found = False + for i in xrange(len(self._db_actions)): + if self._db_actions[i].db_id == action.db_id: + self._db_actions[i] = action + found = True + break + if not found: + self._db_actions.append(action) + self.db_actions_id_index[action.db_id] = action + def db_delete_action(self, action): + self.is_dirty = True + for i in xrange(len(self._db_actions)): + if self._db_actions[i].db_id == action.db_id: + if not self._db_actions[i].is_new: + self.db_deleted_actions.append(self._db_actions[i]) + del self._db_actions[i] + break + del self.db_actions_id_index[action.db_id] + def db_get_action(self, key): + for i in xrange(len(self._db_actions)): + if self._db_actions[i].db_id == key: + return self._db_actions[i] + return None + def db_get_action_by_id(self, key): + return self.db_actions_id_index[key] + def db_has_action_with_id(self, key): + return key in self.db_actions_id_index + + def __get_db_tags(self): + return self._db_tags + def __set_db_tags(self, tags): + self._db_tags = tags + self.is_dirty = True + db_tags = property(__get_db_tags, __set_db_tags) + def db_get_tags(self): + return self._db_tags + def db_add_tag(self, tag): + self.is_dirty = True + self._db_tags.append(tag) + self.db_tags_id_index[tag.db_id] = tag + self.db_tags_name_index[tag.db_name] = tag + def db_change_tag(self, tag): + self.is_dirty = True + found = False + for i in xrange(len(self._db_tags)): + if self._db_tags[i].db_id == tag.db_id: + self._db_tags[i] = tag + found = True + 
break + if not found: + self._db_tags.append(tag) + self.db_tags_id_index[tag.db_id] = tag + self.db_tags_name_index[tag.db_name] = tag + def db_delete_tag(self, tag): + self.is_dirty = True + for i in xrange(len(self._db_tags)): + if self._db_tags[i].db_id == tag.db_id: + if not self._db_tags[i].is_new: + self.db_deleted_tags.append(self._db_tags[i]) + del self._db_tags[i] + break + del self.db_tags_id_index[tag.db_id] + del self.db_tags_name_index[tag.db_name] + def db_get_tag(self, key): + for i in xrange(len(self._db_tags)): + if self._db_tags[i].db_id == key: + return self._db_tags[i] + return None + def db_get_tag_by_id(self, key): + return self.db_tags_id_index[key] + def db_has_tag_with_id(self, key): + return key in self.db_tags_id_index + def db_get_tag_by_name(self, key): + return self.db_tags_name_index[key] + def db_has_tag_with_name(self, key): + return key in self.db_tags_name_index + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + self.db_annotations_key_index[annotation.db_key] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): 
+ if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + del self.db_annotations_key_index[annotation.db_key] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + def db_get_annotation_by_key(self, key): + return self.db_annotations_key_index[key] + def db_has_annotation_with_key(self, key): + return key in self.db_annotations_key_index + + def __get_db_controlParameters(self): + return self._db_controlParameters + def __set_db_controlParameters(self, controlParameters): + self._db_controlParameters = controlParameters + self.is_dirty = True + db_controlParameters = property(__get_db_controlParameters, __set_db_controlParameters) + def db_get_controlParameters(self): + return self._db_controlParameters + def db_add_controlParameter(self, controlParameter): + self.is_dirty = True + self._db_controlParameters.append(controlParameter) + self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter + self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter + def db_change_controlParameter(self, controlParameter): + self.is_dirty = True + found = False + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == controlParameter.db_id: + self._db_controlParameters[i] = controlParameter + found = True + break + if not found: + self._db_controlParameters.append(controlParameter) + self.db_controlParameters_id_index[controlParameter.db_id] = controlParameter + 
self.db_controlParameters_name_index[controlParameter.db_name] = controlParameter + def db_delete_controlParameter(self, controlParameter): + self.is_dirty = True + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == controlParameter.db_id: + if not self._db_controlParameters[i].is_new: + self.db_deleted_controlParameters.append(self._db_controlParameters[i]) + del self._db_controlParameters[i] + break + del self.db_controlParameters_id_index[controlParameter.db_id] + del self.db_controlParameters_name_index[controlParameter.db_name] + def db_get_controlParameter(self, key): + for i in xrange(len(self._db_controlParameters)): + if self._db_controlParameters[i].db_id == key: + return self._db_controlParameters[i] + return None + def db_get_controlParameter_by_id(self, key): + return self.db_controlParameters_id_index[key] + def db_has_controlParameter_with_id(self, key): + return key in self.db_controlParameters_id_index + def db_get_controlParameter_by_name(self, key): + return self.db_controlParameters_name_index[key] + def db_has_controlParameter_with_name(self, key): + return key in self.db_controlParameters_name_index + + def __get_db_vistrailVariables(self): + return self._db_vistrailVariables + def __set_db_vistrailVariables(self, vistrailVariables): + self._db_vistrailVariables = vistrailVariables + self.is_dirty = True + db_vistrailVariables = property(__get_db_vistrailVariables, __set_db_vistrailVariables) + def db_get_vistrailVariables(self): + return self._db_vistrailVariables + def db_add_vistrailVariable(self, vistrailVariable): + self.is_dirty = True + self._db_vistrailVariables.append(vistrailVariable) + self.db_vistrailVariables_name_index[vistrailVariable.db_name] = vistrailVariable + self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] = vistrailVariable + def db_change_vistrailVariable(self, vistrailVariable): + self.is_dirty = True + found = False + for i in 
xrange(len(self._db_vistrailVariables)): + if self._db_vistrailVariables[i].db_name == vistrailVariable.db_name: + self._db_vistrailVariables[i] = vistrailVariable + found = True + break + if not found: + self._db_vistrailVariables.append(vistrailVariable) + self.db_vistrailVariables_name_index[vistrailVariable.db_name] = vistrailVariable + self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] = vistrailVariable + def db_delete_vistrailVariable(self, vistrailVariable): + self.is_dirty = True + for i in xrange(len(self._db_vistrailVariables)): + if self._db_vistrailVariables[i].db_name == vistrailVariable.db_name: + if not self._db_vistrailVariables[i].is_new: + self.db_deleted_vistrailVariables.append(self._db_vistrailVariables[i]) + del self._db_vistrailVariables[i] + break + del self.db_vistrailVariables_name_index[vistrailVariable.db_name] + del self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] + def db_get_vistrailVariable(self, key): + for i in xrange(len(self._db_vistrailVariables)): + if self._db_vistrailVariables[i].db_name == key: + return self._db_vistrailVariables[i] + return None + def db_get_vistrailVariable_by_name(self, key): + return self.db_vistrailVariables_name_index[key] + def db_has_vistrailVariable_with_name(self, key): + return key in self.db_vistrailVariables_name_index + def db_get_vistrailVariable_by_uuid(self, key): + return self.db_vistrailVariables_uuid_index[key] + def db_has_vistrailVariable_with_uuid(self, key): + return key in self.db_vistrailVariables_uuid_index + + def __get_db_parameter_explorations(self): + return self._db_parameter_explorations + def __set_db_parameter_explorations(self, parameter_explorations): + self._db_parameter_explorations = parameter_explorations + self.is_dirty = True + db_parameter_explorations = property(__get_db_parameter_explorations, __set_db_parameter_explorations) + def db_get_parameter_explorations(self): + return self._db_parameter_explorations + def 
db_add_parameter_exploration(self, parameter_exploration): + self.is_dirty = True + self._db_parameter_explorations.append(parameter_exploration) + self.db_parameter_explorations_id_index[parameter_exploration.db_id] = parameter_exploration + def db_change_parameter_exploration(self, parameter_exploration): + self.is_dirty = True + found = False + for i in xrange(len(self._db_parameter_explorations)): + if self._db_parameter_explorations[i].db_id == parameter_exploration.db_id: + self._db_parameter_explorations[i] = parameter_exploration + found = True + break + if not found: + self._db_parameter_explorations.append(parameter_exploration) + self.db_parameter_explorations_id_index[parameter_exploration.db_id] = parameter_exploration + def db_delete_parameter_exploration(self, parameter_exploration): + self.is_dirty = True + for i in xrange(len(self._db_parameter_explorations)): + if self._db_parameter_explorations[i].db_id == parameter_exploration.db_id: + if not self._db_parameter_explorations[i].is_new: + self.db_deleted_parameter_explorations.append(self._db_parameter_explorations[i]) + del self._db_parameter_explorations[i] + break + del self.db_parameter_explorations_id_index[parameter_exploration.db_id] + def db_get_parameter_exploration(self, key): + for i in xrange(len(self._db_parameter_explorations)): + if self._db_parameter_explorations[i].db_id == key: + return self._db_parameter_explorations[i] + return None + def db_get_parameter_exploration_by_id(self, key): + return self.db_parameter_explorations_id_index[key] + def db_has_parameter_exploration_with_id(self, key): + return key in self.db_parameter_explorations_id_index + + def __get_db_actionAnnotations(self): + return self._db_actionAnnotations + def __set_db_actionAnnotations(self, actionAnnotations): + self._db_actionAnnotations = actionAnnotations + self.is_dirty = True + db_actionAnnotations = property(__get_db_actionAnnotations, __set_db_actionAnnotations) + def db_get_actionAnnotations(self): 
+ return self._db_actionAnnotations + def db_add_actionAnnotation(self, actionAnnotation): + self.is_dirty = True + self._db_actionAnnotations.append(actionAnnotation) + self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation + self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation + self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation + def db_change_actionAnnotation(self, actionAnnotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_actionAnnotations)): + if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id: + self._db_actionAnnotations[i] = actionAnnotation + found = True + break + if not found: + self._db_actionAnnotations.append(actionAnnotation) + self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation + self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation + self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation + def db_delete_actionAnnotation(self, actionAnnotation): + self.is_dirty = True + for i in xrange(len(self._db_actionAnnotations)): + if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id: + if not self._db_actionAnnotations[i].is_new: + self.db_deleted_actionAnnotations.append(self._db_actionAnnotations[i]) + del self._db_actionAnnotations[i] + break + del self.db_actionAnnotations_id_index[actionAnnotation.db_id] + del self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] + try: + del self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] + except KeyError: + pass + def db_get_actionAnnotation(self, key): + for i in xrange(len(self._db_actionAnnotations)): + if self._db_actionAnnotations[i].db_id == key: + return self._db_actionAnnotations[i] + 
return None + def db_get_actionAnnotation_by_id(self, key): + return self.db_actionAnnotations_id_index[key] + def db_has_actionAnnotation_with_id(self, key): + return key in self.db_actionAnnotations_id_index + def db_get_actionAnnotation_by_action_id(self, key): + return self.db_actionAnnotations_action_id_index[key] + def db_has_actionAnnotation_with_action_id(self, key): + return key in self.db_actionAnnotations_action_id_index + def db_get_actionAnnotation_by_key(self, key): + return self.db_actionAnnotations_key_index[key] + def db_has_actionAnnotation_with_key(self, key): + return key in self.db_actionAnnotations_key_index + + def getPrimaryKey(self): + return self._db_id + +class DBModuleExec(object): + + vtType = 'module_exec' + + def __init__(self, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, module_name=None, completed=None, error=None, machine_id=None, annotations=None, loop_execs=None): + self._db_id = id + self._db_ts_start = ts_start + self._db_ts_end = ts_end + self._db_cached = cached + self._db_module_id = module_id + self._db_module_name = module_name + self._db_completed = completed + self._db_error = error + self._db_machine_id = machine_id + self.db_deleted_annotations = [] + self.db_annotations_id_index = {} + if annotations is None: + self._db_annotations = [] + else: + self._db_annotations = annotations + for v in self._db_annotations: + self.db_annotations_id_index[v.db_id] = v + self.db_deleted_loop_execs = [] + self.db_loop_execs_id_index = {} + if loop_execs is None: + self._db_loop_execs = [] + else: + self._db_loop_execs = loop_execs + for v in self._db_loop_execs: + self.db_loop_execs_id_index[v.db_id] = v + self.is_dirty = True + self.is_new = True + + def __copy__(self): + return DBModuleExec.do_copy(self) + + def do_copy(self, new_ids=False, id_scope=None, id_remap=None): + cp = DBModuleExec(id=self._db_id, + ts_start=self._db_ts_start, + ts_end=self._db_ts_end, + cached=self._db_cached, + 
module_id=self._db_module_id, + module_name=self._db_module_name, + completed=self._db_completed, + error=self._db_error, + machine_id=self._db_machine_id) + if self._db_annotations is None: + cp._db_annotations = [] + else: + cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations] + if self._db_loop_execs is None: + cp._db_loop_execs = [] + else: + cp._db_loop_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_execs] + + # set new ids + if new_ids: + new_id = id_scope.getNewId(self.vtType) + if self.vtType in id_scope.remap: + id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id + else: + id_remap[(self.vtType, self.db_id)] = new_id + cp.db_id = new_id + if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap: + cp._db_module_id = id_remap[('module', self._db_module_id)] + if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap: + cp._db_machine_id = id_remap[('machine', self._db_machine_id)] + + # recreate indices and set flags + cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations) + cp.db_loop_execs_id_index = dict((v.db_id, v) for v in cp._db_loop_execs) + if not new_ids: + cp.is_dirty = self.is_dirty + cp.is_new = self.is_new + return cp + + @staticmethod + def update_version(old_obj, trans_dict, new_obj=None): + if new_obj is None: + new_obj = DBModuleExec() + class_dict = {} + if new_obj.__class__.__name__ in trans_dict: + class_dict = trans_dict[new_obj.__class__.__name__] + if 'id' in class_dict: + res = class_dict['id'](old_obj, trans_dict) + new_obj.db_id = res + elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None: + new_obj.db_id = old_obj.db_id + if 'ts_start' in class_dict: + res = class_dict['ts_start'](old_obj, trans_dict) + new_obj.db_ts_start = res + elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None: + new_obj.db_ts_start = old_obj.db_ts_start + if 'ts_end' in class_dict: + res = 
class_dict['ts_end'](old_obj, trans_dict) + new_obj.db_ts_end = res + elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None: + new_obj.db_ts_end = old_obj.db_ts_end + if 'cached' in class_dict: + res = class_dict['cached'](old_obj, trans_dict) + new_obj.db_cached = res + elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None: + new_obj.db_cached = old_obj.db_cached + if 'module_id' in class_dict: + res = class_dict['module_id'](old_obj, trans_dict) + new_obj.db_module_id = res + elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None: + new_obj.db_module_id = old_obj.db_module_id + if 'module_name' in class_dict: + res = class_dict['module_name'](old_obj, trans_dict) + new_obj.db_module_name = res + elif hasattr(old_obj, 'db_module_name') and old_obj.db_module_name is not None: + new_obj.db_module_name = old_obj.db_module_name + if 'completed' in class_dict: + res = class_dict['completed'](old_obj, trans_dict) + new_obj.db_completed = res + elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None: + new_obj.db_completed = old_obj.db_completed + if 'error' in class_dict: + res = class_dict['error'](old_obj, trans_dict) + new_obj.db_error = res + elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None: + new_obj.db_error = old_obj.db_error + if 'machine_id' in class_dict: + res = class_dict['machine_id'](old_obj, trans_dict) + new_obj.db_machine_id = res + elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None: + new_obj.db_machine_id = old_obj.db_machine_id + if 'annotations' in class_dict: + res = class_dict['annotations'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_annotation(obj) + elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None: + for obj in old_obj.db_annotations: + new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 
'db_deleted_annotations'): + for obj in old_obj.db_deleted_annotations: + n_obj = DBAnnotation.update_version(obj, trans_dict) + new_obj.db_deleted_annotations.append(n_obj) + if 'loop_execs' in class_dict: + res = class_dict['loop_execs'](old_obj, trans_dict) + for obj in res: + new_obj.db_add_loop_exec(obj) + elif hasattr(old_obj, 'db_loop_execs') and old_obj.db_loop_execs is not None: + for obj in old_obj.db_loop_execs: + new_obj.db_add_loop_exec(DBLoopExec.update_version(obj, trans_dict)) + if hasattr(old_obj, 'db_deleted_loop_execs') and hasattr(new_obj, 'db_deleted_loop_execs'): + for obj in old_obj.db_deleted_loop_execs: + n_obj = DBLoopExec.update_version(obj, trans_dict) + new_obj.db_deleted_loop_execs.append(n_obj) + new_obj.is_new = old_obj.is_new + new_obj.is_dirty = old_obj.is_dirty + return new_obj + + def db_children(self, parent=(None,None), orphan=False, for_action=False): + children = [] + to_del = [] + for child in self.db_annotations: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_annotation(child) + to_del = [] + for child in self.db_loop_execs: + children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action)) + if orphan: + to_del.append(child) + for child in to_del: + self.db_delete_loop_exec(child) + children.append((self, parent[0], parent[1])) + return children + def db_deleted_children(self, remove=False): + children = [] + children.extend(self.db_deleted_annotations) + children.extend(self.db_deleted_loop_execs) + if remove: + self.db_deleted_annotations = [] + self.db_deleted_loop_execs = [] + return children + def has_changes(self): + if self.is_dirty: + return True + for child in self._db_annotations: + if child.has_changes(): + return True + for child in self._db_loop_execs: + if child.has_changes(): + return True + return False + def __get_db_id(self): + return self._db_id + def __set_db_id(self, id): + 
self._db_id = id + self.is_dirty = True + db_id = property(__get_db_id, __set_db_id) + def db_add_id(self, id): + self._db_id = id + def db_change_id(self, id): + self._db_id = id + def db_delete_id(self, id): + self._db_id = None + + def __get_db_ts_start(self): + return self._db_ts_start + def __set_db_ts_start(self, ts_start): + self._db_ts_start = ts_start + self.is_dirty = True + db_ts_start = property(__get_db_ts_start, __set_db_ts_start) + def db_add_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_change_ts_start(self, ts_start): + self._db_ts_start = ts_start + def db_delete_ts_start(self, ts_start): + self._db_ts_start = None + + def __get_db_ts_end(self): + return self._db_ts_end + def __set_db_ts_end(self, ts_end): + self._db_ts_end = ts_end + self.is_dirty = True + db_ts_end = property(__get_db_ts_end, __set_db_ts_end) + def db_add_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_change_ts_end(self, ts_end): + self._db_ts_end = ts_end + def db_delete_ts_end(self, ts_end): + self._db_ts_end = None + + def __get_db_cached(self): + return self._db_cached + def __set_db_cached(self, cached): + self._db_cached = cached + self.is_dirty = True + db_cached = property(__get_db_cached, __set_db_cached) + def db_add_cached(self, cached): + self._db_cached = cached + def db_change_cached(self, cached): + self._db_cached = cached + def db_delete_cached(self, cached): + self._db_cached = None + + def __get_db_module_id(self): + return self._db_module_id + def __set_db_module_id(self, module_id): + self._db_module_id = module_id + self.is_dirty = True + db_module_id = property(__get_db_module_id, __set_db_module_id) + def db_add_module_id(self, module_id): + self._db_module_id = module_id + def db_change_module_id(self, module_id): + self._db_module_id = module_id + def db_delete_module_id(self, module_id): + self._db_module_id = None + + def __get_db_module_name(self): + return self._db_module_name + def __set_db_module_name(self, 
module_name): + self._db_module_name = module_name + self.is_dirty = True + db_module_name = property(__get_db_module_name, __set_db_module_name) + def db_add_module_name(self, module_name): + self._db_module_name = module_name + def db_change_module_name(self, module_name): + self._db_module_name = module_name + def db_delete_module_name(self, module_name): + self._db_module_name = None + + def __get_db_completed(self): + return self._db_completed + def __set_db_completed(self, completed): + self._db_completed = completed + self.is_dirty = True + db_completed = property(__get_db_completed, __set_db_completed) + def db_add_completed(self, completed): + self._db_completed = completed + def db_change_completed(self, completed): + self._db_completed = completed + def db_delete_completed(self, completed): + self._db_completed = None + + def __get_db_error(self): + return self._db_error + def __set_db_error(self, error): + self._db_error = error + self.is_dirty = True + db_error = property(__get_db_error, __set_db_error) + def db_add_error(self, error): + self._db_error = error + def db_change_error(self, error): + self._db_error = error + def db_delete_error(self, error): + self._db_error = None + + def __get_db_machine_id(self): + return self._db_machine_id + def __set_db_machine_id(self, machine_id): + self._db_machine_id = machine_id + self.is_dirty = True + db_machine_id = property(__get_db_machine_id, __set_db_machine_id) + def db_add_machine_id(self, machine_id): + self._db_machine_id = machine_id + def db_change_machine_id(self, machine_id): + self._db_machine_id = machine_id + def db_delete_machine_id(self, machine_id): + self._db_machine_id = None + + def __get_db_annotations(self): + return self._db_annotations + def __set_db_annotations(self, annotations): + self._db_annotations = annotations + self.is_dirty = True + db_annotations = property(__get_db_annotations, __set_db_annotations) + def db_get_annotations(self): + return self._db_annotations + def 
db_add_annotation(self, annotation): + self.is_dirty = True + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + def db_change_annotation(self, annotation): + self.is_dirty = True + found = False + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + self._db_annotations[i] = annotation + found = True + break + if not found: + self._db_annotations.append(annotation) + self.db_annotations_id_index[annotation.db_id] = annotation + def db_delete_annotation(self, annotation): + self.is_dirty = True + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == annotation.db_id: + if not self._db_annotations[i].is_new: + self.db_deleted_annotations.append(self._db_annotations[i]) + del self._db_annotations[i] + break + del self.db_annotations_id_index[annotation.db_id] + def db_get_annotation(self, key): + for i in xrange(len(self._db_annotations)): + if self._db_annotations[i].db_id == key: + return self._db_annotations[i] + return None + def db_get_annotation_by_id(self, key): + return self.db_annotations_id_index[key] + def db_has_annotation_with_id(self, key): + return key in self.db_annotations_id_index + + def __get_db_loop_execs(self): + return self._db_loop_execs + def __set_db_loop_execs(self, loop_execs): + self._db_loop_execs = loop_execs + self.is_dirty = True + db_loop_execs = property(__get_db_loop_execs, __set_db_loop_execs) + def db_get_loop_execs(self): + return self._db_loop_execs + def db_add_loop_exec(self, loop_exec): + self.is_dirty = True + self._db_loop_execs.append(loop_exec) + self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec + def db_change_loop_exec(self, loop_exec): + self.is_dirty = True + found = False + for i in xrange(len(self._db_loop_execs)): + if self._db_loop_execs[i].db_id == loop_exec.db_id: + self._db_loop_execs[i] = loop_exec + found = True + break + if not found: + 
self._db_loop_execs.append(loop_exec) + self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec + def db_delete_loop_exec(self, loop_exec): + self.is_dirty = True + for i in xrange(len(self._db_loop_execs)): + if self._db_loop_execs[i].db_id == loop_exec.db_id: + if not self._db_loop_execs[i].is_new: + self.db_deleted_loop_execs.append(self._db_loop_execs[i]) + del self._db_loop_execs[i] + break + del self.db_loop_execs_id_index[loop_exec.db_id] + def db_get_loop_exec(self, key): + for i in xrange(len(self._db_loop_execs)): + if self._db_loop_execs[i].db_id == key: + return self._db_loop_execs[i] + return None + def db_get_loop_exec_by_id(self, key): + return self.db_loop_execs_id_index[key] + def db_has_loop_exec_with_id(self, key): + return key in self.db_loop_execs_id_index + + def getPrimaryKey(self): + return self._db_id + diff --git a/vistrails/db/versions/v1_0_4/domain/id_scope.py b/vistrails/db/versions/v1_0_4/domain/id_scope.py new file mode 100644 index 000000000..0e07f029c --- /dev/null +++ b/vistrails/db/versions/v1_0_4/domain/id_scope.py @@ -0,0 +1,85 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. 
+## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
import copy

class IdScope:
    """Allocates sequential ids per object type.

    remap maps one object type onto another so that several types share a
    single id space (e.g. add/change/delete all drawing 'operation' ids).
    beginId is the first id handed out for a type that has none yet.
    """
    def __init__(self, beginId=0, remap=None):
        # NOTE: default was previously the long literal 0L; plain 0 is
        # numerically identical in Python 2 and valid in Python 3.
        self.ids = {}
        self.beginId = beginId
        self.remap = {} if remap is None else remap

    def __copy__(self):
        cp = IdScope(beginId=self.beginId)
        cp.ids = copy.copy(self.ids)
        cp.remap = copy.copy(self.remap)
        return cp

    def __str__(self):
        return str(self.ids)

    def getNewId(self, objType):
        """Return the next free id for objType and advance the counter."""
        objType = self.remap.get(objType, objType)
        try:
            new_id = self.ids[objType]
            self.ids[objType] += 1
            return new_id
        except KeyError:
            # first allocation for this type starts at beginId
            self.ids[objType] = self.beginId + 1
            return self.beginId

    def updateBeginId(self, objType, beginId):
        """Raise the counter for objType to beginId, never lowering it."""
        objType = self.remap.get(objType, objType)
        if self.ids.get(objType, beginId) <= beginId:
            self.ids[objType] = beginId

    def setBeginId(self, objType, beginId):
        """Force the counter for objType to beginId unconditionally."""
        objType = self.remap.get(objType, objType)
        self.ids[objType] = beginId
+## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
from auto_gen import DBLog as _DBLog
from auto_gen import DBAbstraction, DBModule, DBGroup, DBLoopExec, \
    DBGroupExec, DBModuleExec
from id_scope import IdScope

import copy

class DBLog(_DBLog):
    """Execution log with an id scope shared across all item executions."""

    def __init__(self, *args, **kwargs):
        _DBLog.__init__(self, *args, **kwargs)
        # loop/module/group executions all draw from the 'item_exec' id
        # space; abstractions and groups draw module ids
        remap = {DBLoopExec.vtType: 'item_exec',
                 DBModuleExec.vtType: 'item_exec',
                 DBGroupExec.vtType: 'item_exec',
                 DBAbstraction.vtType: DBModule.vtType,
                 DBGroup.vtType: DBModule.vtType}
        self.id_scope = IdScope(1, remap)

    def __copy__(self):
        return DBLog.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # copy through the generated base, then restore the subclass and
        # duplicate the id scope so the copy allocates ids independently
        cp = _DBLog.do_copy(self, new_ids, id_scope, id_remap)
        cp.__class__ = DBLog
        cp.id_scope = copy.copy(self.id_scope)
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        if new_obj is None:
            new_obj = DBLog()
        new_obj = _DBLog.update_version(old_obj, trans_dict, new_obj)
        new_obj.update_id_scope()
        return new_obj

    def update_id_scope(self):
        # nothing to recompute for logs
        pass
+## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
from auto_gen import DBRegistry as _DBRegistry, DBPackage, \
    DBModuleDescriptor, DBPortSpec
from id_scope import IdScope

class DBRegistry(_DBRegistry):
    """Module registry that tracks the next free id for its children."""

    def __init__(self, *args, **kwargs):
        _DBRegistry.__init__(self, *args, **kwargs)
        self.idScope = IdScope()

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        if new_obj is None:
            new_obj = DBRegistry()
        new_obj = _DBRegistry.update_version(old_obj, trans_dict, new_obj)
        new_obj.update_id_scope()
        return new_obj

    def update_id_scope(self):
        # walk packages -> descriptors -> port specs, pushing the id scope
        # past every id that is already in use
        scope = self.idScope
        for pkg in self.db_packages:
            scope.updateBeginId(DBPackage.vtType, pkg.db_id + 1)
            for desc in pkg.db_module_descriptors:
                scope.updateBeginId(DBModuleDescriptor.vtType,
                                    desc.db_id + 1)
                for spec in desc.db_portSpecs:
                    scope.updateBeginId(DBPortSpec.vtType, spec.db_id + 1)
+## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
import copy
import hashlib
from auto_gen import DBVistrail as _DBVistrail
from auto_gen import DBAdd, DBChange, DBDelete, DBAbstraction, DBGroup, \
    DBModule, DBAnnotation, DBActionAnnotation, DBParameterExploration
from id_scope import IdScope

class DBVistrail(_DBVistrail):
    """Vistrail root object for schema v1.0.4.

    Adds id management, an index of data objects referenced by add/change
    operations, and checkout-version annotations on top of the generated
    _DBVistrail.
    """
    def __init__(self, *args, **kwargs):
        _DBVistrail.__init__(self, *args, **kwargs)
        # add/change/delete share the 'operation' id space; abstractions
        # and groups draw module ids; action annotations draw annotation ids
        self.idScope = IdScope(remap={DBAdd.vtType: 'operation',
                                      DBChange.vtType: 'operation',
                                      DBDelete.vtType: 'operation',
                                      DBAbstraction.vtType: DBModule.vtType,
                                      DBGroup.vtType: DBModule.vtType,
                                      DBActionAnnotation.vtType: \
                                          DBAnnotation.vtType})
        self.idScope.setBeginId('action', 1)
        self.idScope.setBeginId(DBParameterExploration.vtType, 1)
        # (vtType, db_id) -> data object, fed by db_add_object
        self.db_objects = {}

        # keep a reference to the current logging information here
        self.db_log_filename = None
        self.log = None

    def __copy__(self):
        return DBVistrail.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        cp = _DBVistrail.do_copy(self, new_ids, id_scope, id_remap)
        cp.__class__ = DBVistrail

        cp.idScope = copy.copy(self.idScope)
        cp.db_objects = copy.copy(self.db_objects)
        cp.db_log_filename = self.db_log_filename
        cp.log = copy.copy(self.log) if self.log is not None else None

        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        if new_obj is None:
            new_obj = DBVistrail()
        new_obj = _DBVistrail.update_version(old_obj, trans_dict, new_obj)
        new_obj.update_id_scope()
        # carry logging info over when the old object had it
        if hasattr(old_obj, 'db_log_filename'):
            new_obj.db_log_filename = old_obj.db_log_filename
        if hasattr(old_obj, 'log'):
            new_obj.log = old_obj.log
        return new_obj

    def update_id_scope(self):
        """Advance idScope past every id already present in the vistrail."""
        # (a dead getOldObjId helper that was never called has been removed)
        def getNewObjId(operation):
            # 'change' operations record the id of the replacement object
            if operation.vtType == 'change':
                return operation.db_newObjId
            return operation.db_objectId

        for action in self.db_actions:
            self.idScope.updateBeginId('action', action.db_id+1)
            if action.db_session is not None:
                self.idScope.updateBeginId('session', action.db_session + 1)
            for operation in action.db_operations:
                self.idScope.updateBeginId('operation', operation.db_id+1)
                if operation.vtType == 'add' or operation.vtType == 'change':
                    # update ids of data
                    self.idScope.updateBeginId(operation.db_what,
                                               getNewObjId(operation)+1)
                    if operation.db_data is None:
                        if operation.vtType == 'change':
                            operation.db_objectId = operation.db_oldObjId
                    self.db_add_object(operation.db_data)
            for annotation in action.db_annotations:
                self.idScope.updateBeginId('annotation', annotation.db_id+1)

        for annotation in self.db_annotations:
            self.idScope.updateBeginId('annotation', annotation.db_id+1)
        for annotation in self.db_actionAnnotations:
            self.idScope.updateBeginId('annotation', annotation.db_id+1)
        for paramexp in self.db_parameter_explorations:
            self.idScope.updateBeginId('parameter_exploration',
                                       paramexp.db_id+1)

    def db_add_object(self, obj):
        self.db_objects[(obj.vtType, obj.db_id)] = obj

    def db_get_object(self, type, id):
        return self.db_objects.get((type, id), None)

    def db_update_object(self, obj, **kwargs):
        # want to swap out old object with a new version;
        # need this for updating aliases... hack it using setattr
        real_obj = self.db_objects[(obj.vtType, obj.db_id)]
        for (k, v) in kwargs.iteritems():
            if hasattr(real_obj, k):
                setattr(real_obj, k, v)

    def _upsert_annotation(self, key, value):
        # Update the annotation in place when the key exists, else add one.
        # Shared by update_checkout_version (was triplicated there).
        if self.db_has_annotation_with_key(key):
            annotation = self.db_get_annotation_by_key(key)
            annotation.db_value = value
        else:
            annotation = DBAnnotation(
                self.idScope.getNewId(DBAnnotation.vtType), key, value)
            self.db_add_annotation(annotation)

    def update_checkout_version(self, app=''):
        """Refresh the checkout-version annotations for app: hashes of the
        annotations and action annotations plus the last action id."""
        checkout_key = "__checkout_version_"
        action_key = checkout_key + app
        annotation_key = action_key + '_annotationhash'
        action_annotation_key = action_key + '_actionannotationhash'

        # delete previous checkout annotations
        for key in [action_key, annotation_key, action_annotation_key]:
            while self.db_has_annotation_with_key(key):
                self.db_delete_annotation(self.db_get_annotation_by_key(key))

        # annotation hash - requires annotations to be clean
        self._upsert_annotation(annotation_key, self.hashAnnotations())
        # action annotation hash
        self._upsert_annotation(action_annotation_key,
                                self.hashActionAnnotations())
        # last action id
        if len(self.db_actions) == 0:
            value = 0
        else:
            value = max(v.db_id for v in self.db_actions)
        self._upsert_annotation(action_key, str(value))

    def hashAnnotations(self):
        """Return an md5 digest over all (key, values) annotation pairs,
        order-independent (keys and values are sorted, duplicates dropped)."""
        annotations = {}
        for annotation in self.db_annotations:
            annotations.setdefault(annotation._db_key, [])
            if annotation._db_value not in annotations[annotation._db_key]:
                annotations[annotation._db_key].append(annotation._db_value)
        m = hashlib.md5()
        for k in sorted(annotations.keys()):
            m.update(str(k))
            for v in sorted(annotations[k]):
                m.update(str(v))
        return m.hexdigest()

    def hashActionAnnotations(self):
        """Like hashAnnotations, but keyed on (action_id, key) pairs."""
        action_annotations = {}
        for aa in self.db_actionAnnotations:
            index = (str(aa.db_action_id), aa.db_key)
            action_annotations.setdefault(index, [])
            if aa.db_value not in action_annotations[index]:
                action_annotations[index].append(aa.db_value)
        m = hashlib.md5()
        for k in sorted(action_annotations.keys()):
            m.update(k[0] + k[1])
            for v in sorted(action_annotations[k]):
                m.update(str(v))
        return m.hexdigest()
+## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
from auto_gen import DBWorkflow as _DBWorkflow
from auto_gen import DBAbstraction, DBModule, DBGroup
from id_scope import IdScope

import copy

class DBWorkflow(_DBWorkflow):
    """Workflow root object with a (vtType, id) -> object index and
    temporary-id allocation for objects that are not yet persisted."""

    # abstractions and groups are stored and indexed as modules
    _vtTypeMap = {DBAbstraction.vtType: DBModule.vtType,
                  DBGroup.vtType: DBModule.vtType}

    def __init__(self, *args, **kwargs):
        _DBWorkflow.__init__(self, *args, **kwargs)
        self.objects = {}
        self.tmp_id = IdScope(1,
                              {DBAbstraction.vtType: DBModule.vtType,
                               DBGroup.vtType: DBModule.vtType})

    def __copy__(self):
        return DBWorkflow.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        cp = _DBWorkflow.do_copy(self, new_ids, id_scope, id_remap)
        cp.__class__ = DBWorkflow
        # need to go through and reset the index to the copied objects
        cp.build_index()
        cp.tmp_id = copy.copy(self.tmp_id)
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        if new_obj is None:
            new_obj = DBWorkflow()
        new_obj = _DBWorkflow.update_version(old_obj, trans_dict, new_obj)
        new_obj.update_id_scope()
        new_obj.build_index()
        return new_obj

    def update_id_scope(self):
        # nothing to recompute for workflows
        pass

    def build_index(self):
        """Rebuild self.objects from this workflow's child tree."""
        g = self._vtTypeMap.get
        self.objects = dict(((g(o.vtType, o.vtType), o._db_id), o)
                            for (o, _, _) in self.db_children())

    def add_to_index(self, object):
        obj_type = self._vtTypeMap.get(object.vtType, object.vtType)
        self.objects[(obj_type, object.getPrimaryKey())] = object

    def delete_from_index(self, object):
        obj_type = self._vtTypeMap.get(object.vtType, object.vtType)
        del self.objects[(obj_type, object.getPrimaryKey())]

    def capitalizeOne(self, str):
        return str[0].upper() + str[1:]

    def db_print_objects(self):
        for k, v in self.objects.iteritems():
            print('%s: %s' % (k, v))

    def db_has_object(self, type, id):
        return (type, id) in self.objects

    def db_get_object(self, type, id):
        return self.objects[(type, id)]

    def _find_parent(self, parent_obj_type, parent_obj_id, parent_obj):
        """Resolve the parent object for add/change/delete.

        Defaults to self when no parent is specified; raises when the
        requested parent is not in the index.  (This logic was previously
        duplicated in db_add_object, db_change_object and db_delete_object.)
        """
        if parent_obj is not None:
            return parent_obj
        if parent_obj_type is None or parent_obj_id is None:
            return self
        parent_obj_type = self._vtTypeMap.get(parent_obj_type,
                                              parent_obj_type)
        try:
            return self.objects[(parent_obj_type, parent_obj_id)]
        except KeyError:
            msg = "Cannot find object of type '%s' with id '%s'" % \
                (parent_obj_type, parent_obj_id)
            raise Exception(msg)

    def db_add_object(self, object, parent_obj_type=None,
                      parent_obj_id=None, parent_obj=None):
        """Add a copy of object under the given parent and index it."""
        parent_obj = self._find_parent(parent_obj_type, parent_obj_id,
                                       parent_obj)
        obj_type = self._vtTypeMap.get(object.vtType, object.vtType)
        funname = 'db_add_' + obj_type
        obj_copy = copy.copy(object)
        getattr(parent_obj, funname)(obj_copy)
        self.add_to_index(obj_copy)

    def db_change_object(self, old_id, object, parent_obj_type=None,
                         parent_obj_id=None, parent_obj=None):
        """Replace the child old_id of the parent with object."""
        parent_obj = self._find_parent(parent_obj_type, parent_obj_id,
                                       parent_obj)
        self.db_delete_object(old_id, object.vtType, None, None, parent_obj)
        self.db_add_object(object, None, None, parent_obj)

    def db_delete_object(self, obj_id, obj_type, parent_obj_type=None,
                         parent_obj_id=None, parent_obj=None):
        """Remove the child obj_id of type obj_type from its parent."""
        parent_obj = self._find_parent(parent_obj_type, parent_obj_id,
                                       parent_obj)
        obj_type = self._vtTypeMap.get(obj_type, obj_type)
        funname = 'db_get_' + obj_type
        if hasattr(parent_obj, funname):
            obj = getattr(parent_obj, funname)(obj_id)
        else:
            # single-valued child: fetch the attribute directly
            obj = getattr(parent_obj, 'db_' + obj_type)
        getattr(parent_obj, 'db_delete_' + obj_type)(obj)
        self.delete_from_index(obj)
from xml.auto_gen import XMLDAOListBase
from sql.auto_gen import SQLDAOListBase
from vistrails.core.system import get_elementtree_library

from vistrails.db import VistrailsDBException
from vistrails.db.versions.v1_0_4 import version as my_version
from vistrails.db.versions.v1_0_4.domain import DBGroup, DBWorkflow, DBVistrail, DBLog, \
    DBRegistry, DBMashuptrail

# object types that form the root of their own persistence hierarchy and
# are therefore loaded/saved through their own entry points
root_set = set([DBVistrail.vtType, DBWorkflow.vtType,
                DBLog.vtType, DBRegistry.vtType, DBMashuptrail.vtType])

ElementTree = get_elementtree_library()


class DAOList(dict):
    """Maps 'xml'/'sql' to their DAO lists and provides the high-level
    open/save entry points built on top of them."""

    def __init__(self):
        self['xml'] = XMLDAOListBase()
        self['sql'] = SQLDAOListBase()

    def parse_xml_file(self, filename):
        return ElementTree.parse(filename)

    def write_xml_file(self, filename, tree):
        def indent(elem, level=0):
            # pretty-print in place by injecting whitespace into text/tail
            i = "\n" + level*"  "
            if len(elem):
                if not elem.text or not elem.text.strip():
                    elem.text = i + "  "
                if not elem.tail or not elem.tail.strip():
                    elem.tail = i
                for elem in elem:
                    indent(elem, level+1)
                if not elem.tail or not elem.tail.strip():
                    elem.tail = i
            else:
                if level and (not elem.tail or not elem.tail.strip()):
                    elem.tail = i
        indent(tree.getroot())
        tree.write(filename)

    def read_xml_object(self, vtType, node):
        return self['xml'][vtType].fromXML(node)

    def write_xml_object(self, obj, node=None):
        return self['xml'][obj.vtType].toXML(obj, node)

    def open_from_xml(self, filename, vtType, tree=None):
        """open_from_xml(filename) -> DBVistrail"""
        if tree is None:
            tree = self.parse_xml_file(filename)
        return self.read_xml_object(vtType, tree.getroot())

    def save_to_xml(self, obj, filename, tags, version=None):
        """save_to_xml(obj : object, filename: str, tags: dict,
                       version: str) -> None
        """
        root = self.write_xml_object(obj)
        if version is None:
            version = my_version
        root.set('version', version)
        for k, v in tags.iteritems():
            root.set(k, v)
        tree = ElementTree.ElementTree(root)
        self.write_xml_file(filename, tree)

    def open_from_db(self, db_connection, vtType, id=None, lock=False,
                     global_props=None):
        """Load one root object of type vtType (and all of its children)
        from the database; child SELECTs are batched into one round trip."""
        all_objects = {}
        if global_props is None:
            global_props = {}
        if id is not None:
            global_props['id'] = id
        res_objects = self['sql'][vtType].get_sql_columns(db_connection,
                                                          global_props,
                                                          lock)
        if len(res_objects) > 1:
            raise VistrailsDBException("More than one object of type '%s' "
                                       "and id '%s' exists in the "
                                       "database" % (vtType, id))
        elif len(res_objects) <= 0:
            raise VistrailsDBException("No objects of type '%s' and "
                                       "id '%s' exist in the database" % \
                                       (vtType, id))

        all_objects.update(res_objects)
        res = res_objects.values()[0]
        global_props = {'entity_id': res.db_id,
                        'entity_type': res.vtType}

        # build all child SELECTs first so they can run as one group;
        # daoList holds [dao_type, dao, result] entries
        daoList = []
        dbCommandList = []
        for dao_type, dao in self['sql'].iteritems():
            if dao_type in root_set:
                continue
            daoList.append([dao_type, dao, None])
            dbCommandList.append(dao.get_sql_select(db_connection,
                                                    global_props, lock))

        # execute all SELECT statements together
        results = self['sql'][vtType].executeSQLGroup(db_connection,
                                                      dbCommandList, True)
        for entry, result in zip(daoList, results):
            entry[2] = result

        # process results
        for dao_type, dao, data in daoList:
            current_objs = dao.process_sql_columns(data, global_props)
            all_objects.update(current_objs)

            if dao_type == DBGroup.vtType:
                # groups embed a full workflow; recurse to load it
                for key, obj in current_objs.iteritems():
                    new_props = {'parent_id': key[1],
                                 'entity_id': global_props['entity_id'],
                                 'entity_type': global_props['entity_type']}
                    res_obj = self.open_from_db(db_connection,
                                                DBWorkflow.vtType,
                                                None, lock, new_props)
                    all_objects[(res_obj.vtType, res_obj.db_id)] = res_obj

        for key, obj in all_objects.iteritems():
            if key[0] == vtType and key[1] == id:
                continue
            self['sql'][obj.vtType].from_sql_fast(obj, all_objects)
        for obj in all_objects.itervalues():
            obj.is_dirty = False
            obj.is_new = False

        return res

    def open_many_from_db(self, db_connection, vtType, ids, lock=False):
        """Load multiple root objects.  All SELECTs are issued as grouped
        multiple-select commands for performance reasons."""
        log_dao = self['sql'][vtType]
        # SELECTs for the main objects, one per id
        selects = [log_dao.get_sql_select(db_connection, {'id': id}, lock)
                   for id in ids]
        results = log_dao.executeSQLGroup(db_connection, selects, True)

        # final objects, in the same order as ids
        objects = []
        # id -> {(vtType, id): object} for that root's children
        all_objects_dict = {}
        # child queries: [id, dao_type, dao, result] entries
        daoList = []
        # child SELECT commands, parallel to daoList
        selects = []
        global_props = {}
        for id, data in zip(ids, results):
            res_objects = log_dao.process_sql_columns(data, global_props)
            if len(res_objects) > 1:
                raise VistrailsDBException("More than one object of type "
                                           "'%s' and id '%s' exists in the "
                                           "database" % (vtType, id))
            elif len(res_objects) <= 0:
                raise VistrailsDBException("No objects of type '%s' and "
                                           "id '%s' exist in the "
                                           "database" % (vtType, id))
            all_objects = {}
            all_objects_dict[id] = all_objects
            all_objects.update(res_objects)
            objects.append(res_objects.values()[0])

            # queue the child SELECTs for this root
            for dao_type, dao in self['sql'].iteritems():
                if dao_type in root_set:
                    continue
                daoList.append([id, dao_type, dao, None])
                selects.append(dao.get_sql_select(db_connection,
                                                  global_props, lock))

        # execute all child SELECT statements together
        results = self['sql'][vtType].executeSQLGroup(db_connection,
                                                      selects, True)
        for entry, result in zip(daoList, results):
            entry[3] = result

        # process results
        for id, dao_type, dao, data in daoList:
            all_objects = all_objects_dict[id]
            current_objs = dao.process_sql_columns(data, global_props)
            all_objects.update(current_objs)

            if dao_type == DBGroup.vtType:
                # groups embed a full workflow; recurse to load it
                for key, obj in current_objs.iteritems():
                    new_props = {'parent_id': key[1],
                                 'entity_id': global_props['entity_id'],
                                 'entity_type': global_props['entity_type']}
                    res_obj = self.open_from_db(db_connection,
                                                DBWorkflow.vtType,
                                                None, lock, new_props)
                    all_objects[(res_obj.vtType, res_obj.db_id)] = res_obj

        for id, all_objects in all_objects_dict.iteritems():
            for key, obj in all_objects.iteritems():
                if key[0] == vtType and key[1] == id:
                    continue
                self['sql'][obj.vtType].from_sql_fast(obj, all_objects)
        for id, dao_type, dao, data in daoList:
            all_objects = all_objects_dict[id]
            for obj in all_objects.itervalues():
                obj.is_dirty = False
                obj.is_new = False

        return objects

    def save_to_db(self, db_connection, obj, do_copy=False, global_props=None):
        """Write obj and all its children to the database.

        do_copy may be False (update in place), True (save as a new copy
        with fresh ids), or 'with_ids' (copy but keep the existing ids).
        """
        if do_copy == 'with_ids':
            do_copy = True
        elif do_copy and obj.db_id is not None:
            # a plain copy gets a fresh id from the database
            obj.db_id = None

        children = obj.db_children()
        children.reverse()
        if global_props is None:
            global_props = {'entity_type': obj.vtType}

        # the root child (the object itself) is written first so its id can
        # seed the entity properties for the remaining children
        child = children[0][0]
        self['sql'][child.vtType].set_sql_columns(db_connection, child,
                                                  global_props, do_copy)
        self['sql'][child.vtType].to_sql_fast(child, do_copy)

        global_props = {'entity_id': child.db_id,
                        'entity_type': child.vtType}

        # do deletes (assumes the entire entity is not being deleted)
        if not do_copy:
            for (child, _, _) in children:
                for c in child.db_deleted_children(True):
                    self['sql'][c.vtType].delete_sql_column(db_connection,
                                                            c,
                                                            global_props)
        child = children.pop(0)[0]
        child.is_dirty = False
        child.is_new = False

        if not len(children):
            return

        # batch the INSERT/UPDATE commands for the remaining children
        dbCommandList = []
        writtenChildren = []
        for (child, _, _) in children:
            dbCommand = self['sql'][child.vtType].set_sql_command(
                db_connection, child, global_props, do_copy)
            if dbCommand is not None:
                dbCommandList.append(dbCommand)
                writtenChildren.append(child)
            self['sql'][child.vtType].to_sql_fast(child, do_copy)

        # execute all INSERT/UPDATE statements together
        results = self['sql'][children[0][0].vtType].executeSQLGroup(
            db_connection, dbCommandList, False)
        resultDict = dict(zip(writtenChildren, results))

        # post-process the written children (assign generated ids etc.)
        for (child, _, _) in children:
            if child in resultDict:
                lastId = resultDict[child]
                self['sql'][child.vtType].set_sql_process(child,
                                                          global_props,
                                                          lastId)
            self['sql'][child.vtType].to_sql_fast(child, do_copy)
            if child.vtType == DBGroup.vtType:
                if child.db_workflow:
                    # a group's embedded workflow is saved recursively
                    new_props = {'entity_id': global_props['entity_id'],
                                 'entity_type': global_props['entity_type']}
                    is_dirty = child.db_workflow.is_dirty
                    child.db_workflow.db_entity_type = DBWorkflow.vtType
                    child.db_workflow.is_dirty = is_dirty
                    self.save_to_db(db_connection, child.db_workflow,
                                    do_copy, new_props)

    def save_many_to_db(self, db_connection, objList, do_copy=False):
        """Write multiple root objects, batching the top-level commands."""
        if do_copy == 'with_ids':
            do_copy = True
        if not len(objList):
            return
        childrenDict = {}
        global_propsDict = {}
        dbCommandList = []
        writtenChildren = []
        for obj in objList:
            if do_copy and obj.db_id is not None:
                obj.db_id = None

            children = obj.db_children()
            children.reverse()
            global_props = {'entity_type': obj.vtType}

            child = children[0][0]
            dbCommand = self['sql'][child.vtType].set_sql_command(
                db_connection, child, global_props, do_copy)
            if dbCommand is not None:
                dbCommandList.append(dbCommand)
                writtenChildren.append(child)

            childrenDict[child] = children
            global_propsDict[child] = global_props

        # Execute all insert/update statements for the main objects
        results = self['sql'][children[0][0].vtType].executeSQLGroup(
            db_connection,
            dbCommandList, False)
        resultDict = dict(zip(writtenChildren, results))
+ dbCommandList = [] + writtenChildren = [] + for child, children in childrenDict.iteritems(): + # process objects + if child in resultDict: + lastId = resultDict[child] + self['sql'][child.vtType].set_sql_process( + child, global_propsDict[child], lastId) + self['sql'][child.vtType].to_sql_fast(child, do_copy) + + # process children + global_props = {'entity_id': child.db_id, + 'entity_type': child.vtType} + global_propsDict[child] = global_props + # do deletes + if not do_copy: + for (child, _, _) in childrenDict[child]: + for c in child.db_deleted_children(True): + self['sql'][c.vtType].delete_sql_column(db_connection, + c, + global_props) + child = children.pop(0)[0] + child.is_dirty = False + child.is_new = False + + # list of all children + # process remaining children + for (child, _, _) in children: + dbCommand = self['sql'][child.vtType].set_sql_command( + db_connection, child, global_props, do_copy) + if dbCommand is not None: + dbCommandList.append(dbCommand) + writtenChildren.append(child) + self['sql'][child.vtType].to_sql_fast(child, do_copy) + + # Execute all child insert/update statements + results = self['sql'][children[0][0].vtType].executeSQLGroup( + db_connection, + dbCommandList, False) + resultDict = dict(zip(writtenChildren, results)) + + for child, children in childrenDict.iteritems(): + global_props = global_propsDict[child] + # process remaining children + for (child, _, _) in children: + if child in resultDict: + lastId = resultDict[child] + self['sql'][child.vtType].set_sql_process(child, + global_props, + lastId) + self['sql'][child.vtType].to_sql_fast(child, do_copy) + if child.vtType == DBGroup.vtType: + if child.db_workflow: + # print '*** entity_type:', global_props['entity_type'] + new_props = {'entity_id': global_props['entity_id'], + 'entity_type': global_props['entity_type']} + is_dirty = child.db_workflow.is_dirty + child.db_workflow.db_entity_type = DBWorkflow.vtType + child.db_workflow.is_dirty = is_dirty + 
self.save_to_db(db_connection, child.db_workflow, do_copy, + new_props) + + def delete_from_db(self, db_connection, type, obj_id): + if type not in root_set: + raise VistrailsDBException("Cannot delete entity of type '%s'" \ + % type) + + id_str = str(obj_id) + for (dao_type, dao) in self['sql'].iteritems(): + if dao_type not in root_set: + db_cmd = \ + self['sql'][type].createSQLDelete(dao.table, + {'entity_type': type, + 'entity_id': id_str}) + self['sql'][type].executeSQL(db_connection, db_cmd, False) + db_cmd = self['sql'][type].createSQLDelete(self['sql'][type].table, + {'id': id_str}) + self['sql'][type].executeSQL(db_connection, db_cmd, False) + + def serialize(self, object): + root = self.write_xml_object(object) + return ElementTree.tostring(root) + + def unserialize(self, str, obj_type): + def set_dirty(obj): + for child, _, _ in obj.db_children(): + if child.vtType == DBGroup.vtType: + if child.db_workflow: + set_dirty(child.db_workflow) + child.is_dirty = True + child.is_new = True + try: + root = ElementTree.fromstring(str) + obj = self.read_xml_object(obj_type, root) + set_dirty(obj) + return obj + except SyntaxError, e: + msg = "Invalid VisTrails serialized object %s" % str + raise VistrailsDBException(msg) + return None diff --git a/vistrails/db/versions/v1_0_4/persistence/sql/__init__.py b/vistrails/db/versions/v1_0_4/persistence/sql/__init__.py new file mode 100644 index 000000000..acfc9d276 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/persistence/sql/__init__.py @@ -0,0 +1,36 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. 
+## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
+## +############################################################################### + +pass \ No newline at end of file diff --git a/vistrails/db/versions/v1_0_4/persistence/sql/auto_gen.py b/vistrails/db/versions/v1_0_4/persistence/sql/auto_gen.py new file mode 100644 index 000000000..769ccfe00 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/persistence/sql/auto_gen.py @@ -0,0 +1,8349 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +## +############################################################################### + +"""generated automatically by auto_dao.py""" + +from sql_dao import SQLDAO +from vistrails.db.versions.v1_0_4.domain import * + +class DBVistrailVariableSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'vistrail_variable' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type'] + table = 'vistrail_variable' + whereMap = global_props + orderBy = 'name' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + name = self.convertFromDB(row[0], 'str', 'varchar(255)') + uuid = self.convertFromDB(row[1], 'str', 'char(36)') + package = self.convertFromDB(row[2], 'str', 'varchar(255)') + module = self.convertFromDB(row[3], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[4], 'str', 'varchar(255)') + value = self.convertFromDB(row[5], 'str', 'varchar(8191)') + vistrail = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + vistrailVariable = DBVistrailVariable(uuid=uuid, + package=package, + module=module, + 
namespace=namespace, + value=value, + name=name) + vistrailVariable.db_vistrail = vistrail + vistrailVariable.db_entity_id = entity_id + vistrailVariable.db_entity_type = entity_type + vistrailVariable.is_dirty = False + res[('vistrailVariable', name)] = vistrailVariable + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type'] + table = 'vistrail_variable' + whereMap = global_props + orderBy = 'name' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + name = self.convertFromDB(row[0], 'str', 'varchar(255)') + uuid = self.convertFromDB(row[1], 'str', 'char(36)') + package = self.convertFromDB(row[2], 'str', 'varchar(255)') + module = self.convertFromDB(row[3], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[4], 'str', 'varchar(255)') + value = self.convertFromDB(row[5], 'str', 'varchar(8191)') + vistrail = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + vistrailVariable = DBVistrailVariable(uuid=uuid, + package=package, + module=module, + namespace=namespace, + value=value, + name=name) + vistrailVariable.db_vistrail = vistrail + vistrailVariable.db_entity_id = entity_id + vistrailVariable.db_entity_type = entity_type + vistrailVariable.is_dirty = False + res[('vistrailVariable', name)] = vistrailVariable + return res + + def from_sql_fast(self, obj, all_objects): + if ('vistrail', obj.db_vistrail) in all_objects: + p = all_objects[('vistrail', obj.db_vistrail)] + p.db_add_vistrailVariable(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 
'entity_type'] + table = 'vistrail_variable' + whereMap = {} + whereMap.update(global_props) + if obj.db_name is not None: + keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)') + whereMap['name'] = keyStr + columnMap = {} + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_uuid') and obj.db_uuid is not None: + columnMap['uuid'] = \ + self.convertToDB(obj.db_uuid, 'str', 'char(36)') + if hasattr(obj, 'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(255)') + if hasattr(obj, 'db_module') and obj.db_module is not None: + columnMap['module'] = \ + self.convertToDB(obj.db_module, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'varchar(8191)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 
'entity_id', 'entity_type'] + table = 'vistrail_variable' + whereMap = {} + whereMap.update(global_props) + if obj.db_name is not None: + keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)') + whereMap['name'] = keyStr + columnMap = {} + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_uuid') and obj.db_uuid is not None: + columnMap['uuid'] = \ + self.convertToDB(obj.db_uuid, 'str', 'char(36)') + if hasattr(obj, 'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(255)') + if hasattr(obj, 'db_module') and obj.db_module is not None: + columnMap['module'] = \ + self.convertToDB(obj.db_module, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'varchar(8191)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'vistrail_variable' + whereMap = {} + 
whereMap.update(global_props) + if obj.db_name is not None: + keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)') + whereMap['name'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBPortSpecSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'port_spec' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'port_spec' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + type = self.convertFromDB(row[2], 'str', 'varchar(255)') + optional = self.convertFromDB(row[3], 'int', 'int') + depth = self.convertFromDB(row[4], 'int', 'int') + sort_key = self.convertFromDB(row[5], 'int', 'int') + min_conns = self.convertFromDB(row[6], 'int', 'int') + max_conns = self.convertFromDB(row[7], 'int', 'int') + parentType = self.convertFromDB(row[8], 'str', 'char(32)') + entity_id = self.convertFromDB(row[9], 'long', 'int') + entity_type = self.convertFromDB(row[10], 'str', 'char(16)') + parent = self.convertFromDB(row[11], 'long', 'long') + + portSpec = DBPortSpec(name=name, + type=type, + optional=optional, + depth=depth, + sort_key=sort_key, + min_conns=min_conns, + max_conns=max_conns, + id=id) + portSpec.db_parentType = parentType + portSpec.db_entity_id = entity_id + portSpec.db_entity_type = entity_type + portSpec.db_parent = parent + portSpec.is_dirty = False + res[('portSpec', id)] = portSpec + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'type', 
'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'port_spec' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + type = self.convertFromDB(row[2], 'str', 'varchar(255)') + optional = self.convertFromDB(row[3], 'int', 'int') + depth = self.convertFromDB(row[4], 'int', 'int') + sort_key = self.convertFromDB(row[5], 'int', 'int') + min_conns = self.convertFromDB(row[6], 'int', 'int') + max_conns = self.convertFromDB(row[7], 'int', 'int') + parentType = self.convertFromDB(row[8], 'str', 'char(32)') + entity_id = self.convertFromDB(row[9], 'long', 'int') + entity_type = self.convertFromDB(row[10], 'str', 'char(16)') + parent = self.convertFromDB(row[11], 'long', 'long') + + portSpec = DBPortSpec(name=name, + type=type, + optional=optional, + depth=depth, + sort_key=sort_key, + min_conns=min_conns, + max_conns=max_conns, + id=id) + portSpec.db_parentType = parentType + portSpec.db_entity_id = entity_id + portSpec.db_entity_type = entity_type + portSpec.db_parent = parent + portSpec.is_dirty = False + res[('portSpec', id)] = portSpec + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'module': + p = all_objects[('module', obj.db_parent)] + p.db_add_portSpec(obj) + elif obj.db_parentType == 'module_descriptor': + p = all_objects[('module_descriptor', obj.db_parent)] + p.db_add_portSpec(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns 
= ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'port_spec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_type') and obj.db_type is not None: + columnMap['type'] = \ + self.convertToDB(obj.db_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_optional') and obj.db_optional is not None: + columnMap['optional'] = \ + self.convertToDB(obj.db_optional, 'int', 'int') + if hasattr(obj, 'db_depth') and obj.db_depth is not None: + columnMap['depth'] = \ + self.convertToDB(obj.db_depth, 'int', 'int') + if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None: + columnMap['sort_key'] = \ + self.convertToDB(obj.db_sort_key, 'int', 'int') + if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None: + columnMap['min_conns'] = \ + self.convertToDB(obj.db_min_conns, 'int', 'int') + if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None: + columnMap['max_conns'] = \ + self.convertToDB(obj.db_max_conns, 'int', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + 
columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'port_spec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_type') and obj.db_type is not None: + columnMap['type'] = \ + self.convertToDB(obj.db_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_optional') and obj.db_optional is not None: + columnMap['optional'] = \ + self.convertToDB(obj.db_optional, 'int', 'int') + if hasattr(obj, 'db_depth') and obj.db_depth is not None: + columnMap['depth'] = \ + self.convertToDB(obj.db_depth, 'int', 'int') + if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None: + columnMap['sort_key'] = \ + self.convertToDB(obj.db_sort_key, 'int', 'int') + if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None: + columnMap['min_conns'] = \ + self.convertToDB(obj.db_min_conns, 'int', 'int') + if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None: + columnMap['max_conns'] = \ + self.convertToDB(obj.db_max_conns, 'int', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + 
self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_portSpecItems: + child.db_portSpec = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'port_spec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBModuleSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'module' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + cache = self.convertFromDB(row[1], 'int', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[3], 
'str', 'varchar(255)') + package = self.convertFromDB(row[4], 'str', 'varchar(511)') + version = self.convertFromDB(row[5], 'str', 'varchar(255)') + parentType = self.convertFromDB(row[6], 'str', 'char(32)') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + parent = self.convertFromDB(row[9], 'long', 'long') + + module = DBModule(cache=cache, + name=name, + namespace=namespace, + package=package, + version=version, + id=id) + module.db_parentType = parentType + module.db_entity_id = entity_id + module.db_entity_type = entity_type + module.db_parent = parent + module.is_dirty = False + res[('module', id)] = module + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + cache = self.convertFromDB(row[1], 'int', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[3], 'str', 'varchar(255)') + package = self.convertFromDB(row[4], 'str', 'varchar(511)') + version = self.convertFromDB(row[5], 'str', 'varchar(255)') + parentType = self.convertFromDB(row[6], 'str', 'char(32)') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + parent = self.convertFromDB(row[9], 'long', 'long') + + module = DBModule(cache=cache, + name=name, + namespace=namespace, + package=package, + version=version, + id=id) + module.db_parentType = parentType + module.db_entity_id = entity_id + module.db_entity_type = entity_type + module.db_parent = parent + module.is_dirty = False + res[('module', id)] = module 
+ return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'workflow': + p = all_objects[('workflow', obj.db_parent)] + p.db_add_module(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_cache') and obj.db_cache is not None: + columnMap['cache'] = \ + self.convertToDB(obj.db_cache, 'int', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(511)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and 
obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (but do not execute) the INSERT/UPDATE statement for a
        module row; returns None when the object is clean and do_copy is
        False."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'module'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_cache') and obj.db_cache is not None:
            columnMap['cache'] = \
                self.convertToDB(obj.db_cache, 'int', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(511)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        # global_props (entity_id/entity_type) override any per-object values
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # module rows need no post-insert fixup (id is never DB-generated here)
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Push this module's type/id down onto its children so their
        parent_type/parent_id columns are correct when they are written."""
        if obj.db_location is not None:
            child = obj.db_location
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_functions:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_controlParameters:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_portSpecs:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        # Delete the single row keyed by id, scoped by global_props
        table = 'module'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBModuleDescriptorSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBModuleDescriptor to the
    'module_descriptor' table."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'module_descriptor'

    def getDao(self, dao):
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all module_descriptor rows matching global_props and
        return {('module_descriptor', id): DBModuleDescriptor}."""
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            package = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # NOTE(review): rebinding 'package' to parent_id clobbers the
            # package-name string read from row[2] before the constructor
            # call below — looks like a code-generator name collision
            # (parent field is also named 'package'); verify against the
            # schema/generator before relying on db_package's type.
            package = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')

            module_descriptor = DBModuleDescriptor(name=name,
                                                   package=package,
                                                   namespace=namespace,
                                                   package_version=package_version,
                                                   version=version,
                                                   base_descriptor_id=base_descriptor_id,
                                                   id=id)
            module_descriptor.db_package = package
            module_descriptor.db_entity_id = entity_id
            module_descriptor.db_entity_type = entity_type
            module_descriptor.is_dirty = False
            res[('module_descriptor', id)] = module_descriptor
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Same SELECT as get_sql_columns, returned unexecuted (for batching)
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        """Materialize DBModuleDescriptor objects from already-fetched rows
        (same row layout as get_sql_columns)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            package = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # NOTE(review): same 'package' rebinding artifact as in
            # get_sql_columns above — parent_id overwrites the name string.
            package = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')

            module_descriptor = DBModuleDescriptor(name=name,
                                                   package=package,
                                                   namespace=namespace,
                                                   package_version=package_version,
                                                   version=version,
                                                   base_descriptor_id=base_descriptor_id,
                                                   id=id)
            module_descriptor.db_package = package
            module_descriptor.db_entity_id = entity_id
            module_descriptor.db_entity_type = entity_type
            module_descriptor.is_dirty = False
            res[('module_descriptor', id)] = module_descriptor
        return res

    def from_sql_fast(self, obj, all_objects):
        # Re-attach this descriptor to its parent package, if loaded
        if ('package', obj.db_package) in all_objects:
            p = all_objects[('package', obj.db_package)]
            p.db_add_module_descriptor(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE (dirty) this descriptor's row."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
            columnMap['package_version'] = \
                self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
            columnMap['base_descriptor_id'] = \
                self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
        # db_package doubles as the parent-package id (see NOTE above)
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_package, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        # Same mapping as set_sql_columns, but returns the statement instead
        # of executing it
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
            columnMap['package_version'] = \
                self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
            columnMap['base_descriptor_id'] = \
                self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_package, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # Propagate parent linkage to child port specs before they are saved
        for child in obj.db_portSpecs:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        table = 'module_descriptor'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBTagSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBTag to the 'tag' table; a tag's parent
    is a vistrail (stored in the parent_id column)."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'tag'

    def getDao(self, dao):
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'tag'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            # parent_id column holds the owning vistrail's id
            vistrail = self.convertFromDB(row[2], 'long', 'int')
            entity_id = self.convertFromDB(row[3], 'long', 'int')
            entity_type = self.convertFromDB(row[4], 'str', 'char(16)')

            tag = DBTag(name=name,
                        id=id)
            tag.db_vistrail = vistrail
            tag.db_entity_id = entity_id
            tag.db_entity_type = entity_type
            tag.is_dirty = False
            res[('tag', id)] = tag
        return res

    def get_sql_select(self, db, global_props,lock=False):
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'tag'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            vistrail = self.convertFromDB(row[2], 'long', 'int')
            entity_id = self.convertFromDB(row[3], 'long', 'int')
            entity_type = self.convertFromDB(row[4], 'str', 'char(16)')

            tag = DBTag(name=name,
                        id=id)
            tag.db_vistrail = vistrail
            tag.db_entity_id = entity_id
            tag.db_entity_type = entity_type
            tag.is_dirty = False
            res[('tag', id)] = tag
        return res

    def from_sql_fast(self, obj, all_objects):
        if ('vistrail', obj.db_vistrail) in all_objects:
            p = all_objects[('vistrail', obj.db_vistrail)]
            p.db_add_tag(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'tag'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'tag'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # tags have no children to link
        pass

    def delete_sql_column(self, db, obj, global_props):
        table = 'tag'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBPortSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBPort to the 'port' table; a port's
    parent may be a connection, an add, or a change action."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'port'

    def getDao(self, dao):
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            type = self.convertFromDB(row[1], 'str', 'varchar(255)')
            moduleId = self.convertFromDB(row[2], 'long', 'int')
            moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
            name = self.convertFromDB(row[4], 'str', 'varchar(255)')
            signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')

            port = DBPort(type=type,
                          moduleId=moduleId,
                          moduleName=moduleName,
                          name=name,
                          signature=signature,
                          id=id)
            port.db_parentType = parentType
            port.db_entity_id = entity_id
            port.db_entity_type = entity_type
            port.db_parent = parent
            port.is_dirty = False
            res[('port', id)] = port
        return res

    def get_sql_select(self, db, global_props,lock=False):
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            type = self.convertFromDB(row[1], 'str', 'varchar(255)')
            moduleId = self.convertFromDB(row[2], 'long', 'int')
            moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
            name = self.convertFromDB(row[4], 'str', 'varchar(255)')
            signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')

            port = DBPort(type=type,
                          moduleId=moduleId,
                          moduleName=moduleName,
                          name=name,
                          signature=signature,
                          id=id)
            port.db_parentType = parentType
            port.db_entity_id = entity_id
            port.db_entity_type = entity_type
            port.db_parent = parent
            port.is_dirty = False
            res[('port', id)] = port
        return res

    def from_sql_fast(self, obj, all_objects):
        # Re-attach to whichever parent kind produced this port
        if obj.db_parentType == 'connection':
            p = all_objects[('connection', obj.db_parent)]
            p.db_add_port(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
            columnMap['moduleId'] = \
                self.convertToDB(obj.db_moduleId, 'long', 'int')
        if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
            columnMap['moduleName'] = \
                self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_signature') and obj.db_signature is not None:
            columnMap['signature'] = \
                self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
            columnMap['moduleId'] = \
                self.convertToDB(obj.db_moduleId, 'long', 'int')
        if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
            columnMap['moduleName'] = \
                self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_signature') and obj.db_signature is not None:
            columnMap['signature'] = \
                self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        pass

    def to_sql_fast(self, obj, do_copy=True):
        pass

    def delete_sql_column(self, db, obj, global_props):
        table = 'port'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBGroupSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBGroup to the 'group_tbl' table
    ('group' is a reserved word in SQL, hence the _tbl suffix)."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'group_tbl'

    def getDao(self, dao):
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            cache = self.convertFromDB(row[1], 'int', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package = self.convertFromDB(row[4], 'str', 'varchar(511)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')

            group = DBGroup(cache=cache,
                            name=name,
                            namespace=namespace,
                            package=package,
                            version=version,
                            id=id)
            group.db_parentType = parentType
            group.db_entity_id = entity_id
            group.db_entity_type = entity_type
            group.db_parent = parent
            group.is_dirty = False
            res[('group', id)] = group
        return res

    def get_sql_select(self, db, global_props,lock=False):
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            cache = self.convertFromDB(row[1], 'int', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package = self.convertFromDB(row[4], 'str', 'varchar(511)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')

            group = DBGroup(cache=cache,
                            name=name,
                            namespace=namespace,
                            package=package,
                            version=version,
                            id=id)
            group.db_parentType = parentType
            group.db_entity_id = entity_id
            group.db_entity_type = entity_type
            group.db_parent = parent
            group.is_dirty = False
            res[('group', id)] = group
        return res

    def from_sql_fast(self, obj, all_objects):
        if obj.db_parentType == 'workflow':
            p = all_objects[('workflow', obj.db_parent)]
            p.db_add_module(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_cache') and obj.db_cache is not None:
            columnMap['cache'] = \
                self.convertToDB(obj.db_cache, 'int', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(511)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_cache') and obj.db_cache is not None:
            columnMap['cache'] = \
                self.convertToDB(obj.db_cache, 'int', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(511)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # The contained workflow records its owning group; other children
        # get the usual parentType/parent linkage.
        if obj.db_workflow is not None:
            child = obj.db_workflow
            child.db_group = obj.db_id
        if obj.db_location is not None:
            child = obj.db_location
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_functions:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_controlParameters:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        table = 'group_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBLogSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBLog to the 'log_tbl' table. Logs are
    top-level entities: loading/saving one also updates global_props with
    its entity_id/entity_type so child DAOs are scoped to it."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'log_tbl'

    def getDao(self, dao):
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            # Scope subsequent child queries to this log entity
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            vistrail_id = self.convertFromDB(row[5], 'long', 'int')

            log = DBLog(entity_type=entity_type,
                        version=version,
                        name=name,
                        last_modified=last_modified,
                        vistrail_id=vistrail_id,
                        id=id)
            log.is_dirty = False
            res[('log', id)] = log
        return res

    def get_sql_select(self, db, global_props,lock=False):
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            vistrail_id = self.convertFromDB(row[5], 'long', 'int')

            log = DBLog(entity_type=entity_type,
                        version=version,
                        name=name,
                        last_modified=last_modified,
                        vistrail_id=vistrail_id,
                        id=id)
            log.is_dirty = False
            res[('log', id)] = log
        return res

    def from_sql_fast(self, obj, all_objects):
        # logs are roots; nothing to re-attach
        pass

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
            columnMap['vistrail_id'] = \
                self.convertToDB(obj.db_vistrail_id, 'long', 'int')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
        # New rows get their id from the DB; propagate it (and the entity
        # type) into global_props so child writes are scoped to this log.
        if obj.db_id is None:
            obj.db_id = lastId
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and
obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None: + columnMap['vistrail_id'] = \ + self.convertToDB(obj.db_vistrail_id, 'long', 'int') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_workflow_execs: + child.db_log = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'log_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBLoopIterationSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'loop_iteration' + + def getDao(self, dao): + return 
self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type'] + table = 'loop_iteration' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + iteration = self.convertFromDB(row[3], 'int', 'int') + completed = self.convertFromDB(row[4], 'int', 'int') + error = self.convertFromDB(row[5], 'str', 'varchar(1023)') + parent = self.convertFromDB(row[6], 'str', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + loop_iteration = DBLoopIteration(ts_start=ts_start, + ts_end=ts_end, + iteration=iteration, + completed=completed, + error=error, + id=id) + loop_iteration.db_parent = parent + loop_iteration.db_entity_id = entity_id + loop_iteration.db_entity_type = entity_type + loop_iteration.is_dirty = False + res[('loop_iteration', id)] = loop_iteration + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type'] + table = 'loop_iteration' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + iteration = self.convertFromDB(row[3], 'int', 'int') + completed = self.convertFromDB(row[4], 'int', 'int') + error = 
self.convertFromDB(row[5], 'str', 'varchar(1023)') + parent = self.convertFromDB(row[6], 'str', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + loop_iteration = DBLoopIteration(ts_start=ts_start, + ts_end=ts_end, + iteration=iteration, + completed=completed, + error=error, + id=id) + loop_iteration.db_parent = parent + loop_iteration.db_entity_id = entity_id + loop_iteration.db_entity_type = entity_type + loop_iteration.is_dirty = False + res[('loop_iteration', id)] = loop_iteration + return res + + def from_sql_fast(self, obj, all_objects): + if ('loop_exec', obj.db_parent) in all_objects: + p = all_objects[('loop_exec', obj.db_parent)] + p.db_add_loop_iteration(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type'] + table = 'loop_iteration' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_iteration') and obj.db_iteration is not None: + columnMap['iteration'] = \ + self.convertToDB(obj.db_iteration, 'int', 'int') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_error') and obj.db_error is not None: + columnMap['error'] = \ + 
self.convertToDB(obj.db_error, 'str', 'varchar(1023)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'str', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type'] + table = 'loop_iteration' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_iteration') and obj.db_iteration is not None: + columnMap['iteration'] = \ + self.convertToDB(obj.db_iteration, 'int', 'int') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_error') and obj.db_error is not None: + columnMap['error'] = \ + 
self.convertToDB(obj.db_error, 'str', 'varchar(1023)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'str', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_item_execs: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'loop_iteration' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBMashupSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'mashup' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + version = self.convertFromDB(row[2], 'long', 'int') + type = 
self.convertFromDB(row[3], 'str', 'varchar(255)') + vtid = self.convertFromDB(row[4], 'long', 'int') + layout = self.convertFromDB(row[5], 'str', 'mediumtext') + geometry = self.convertFromDB(row[6], 'str', 'mediumtext') + has_seq = self.convertFromDB(row[7], 'int', 'int') + parent = self.convertFromDB(row[8], 'long', 'int') + entity_id = self.convertFromDB(row[9], 'long', 'int') + entity_type = self.convertFromDB(row[10], 'str', 'char(16)') + + mashup = DBMashup(name=name, + version=version, + type=type, + vtid=vtid, + layout=layout, + geometry=geometry, + has_seq=has_seq, + id=id) + mashup.db_parent = parent + mashup.db_entity_id = entity_id + mashup.db_entity_type = entity_type + mashup.is_dirty = False + res[('mashup', id)] = mashup + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + version = self.convertFromDB(row[2], 'long', 'int') + type = self.convertFromDB(row[3], 'str', 'varchar(255)') + vtid = self.convertFromDB(row[4], 'long', 'int') + layout = self.convertFromDB(row[5], 'str', 'mediumtext') + geometry = self.convertFromDB(row[6], 'str', 'mediumtext') + has_seq = self.convertFromDB(row[7], 'int', 'int') + parent = self.convertFromDB(row[8], 'long', 'int') + entity_id = self.convertFromDB(row[9], 'long', 'int') + entity_type = self.convertFromDB(row[10], 'str', 'char(16)') + + mashup = DBMashup(name=name, + version=version, + type=type, + vtid=vtid, + layout=layout, + geometry=geometry, + has_seq=has_seq, + id=id) + mashup.db_parent = parent + mashup.db_entity_id = entity_id + 
mashup.db_entity_type = entity_type + mashup.is_dirty = False + res[('mashup', id)] = mashup + return res + + def from_sql_fast(self, obj, all_objects): + if ('mashup_action', obj.db_parent) in all_objects: + p = all_objects[('mashup_action', obj.db_parent)] + p.db_add_mashup(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'long', 'int') + if hasattr(obj, 'db_type') and obj.db_type is not None: + columnMap['type'] = \ + self.convertToDB(obj.db_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_vtid') and obj.db_vtid is not None: + columnMap['vtid'] = \ + self.convertToDB(obj.db_vtid, 'long', 'int') + if hasattr(obj, 'db_layout') and obj.db_layout is not None: + columnMap['layout'] = \ + self.convertToDB(obj.db_layout, 'str', 'mediumtext') + if hasattr(obj, 'db_geometry') and obj.db_geometry is not None: + columnMap['geometry'] = \ + self.convertToDB(obj.db_geometry, 'str', 'mediumtext') + if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None: + columnMap['has_seq'] = \ + self.convertToDB(obj.db_has_seq, 'int', 'int') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'int') + if hasattr(obj, 'db_entity_id') and 
obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'long', 'int') + if hasattr(obj, 'db_type') and obj.db_type is not None: + columnMap['type'] = \ + self.convertToDB(obj.db_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_vtid') and obj.db_vtid is not None: + columnMap['vtid'] = \ + self.convertToDB(obj.db_vtid, 'long', 'int') + if hasattr(obj, 'db_layout') and obj.db_layout is not None: + columnMap['layout'] = \ + self.convertToDB(obj.db_layout, 'str', 'mediumtext') + if hasattr(obj, 'db_geometry') and obj.db_geometry is not None: + columnMap['geometry'] = \ + self.convertToDB(obj.db_geometry, 'str', 'mediumtext') + if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None: + columnMap['has_seq'] = \ 
+ self.convertToDB(obj.db_has_seq, 'int', 'int') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_aliases: + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'mashup' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBPortSpecItemSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'port_spec_item' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'port_spec_item' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + pos = self.convertFromDB(row[1], 'long', 'int') + module = self.convertFromDB(row[2], 'str', 'varchar(255)') + package = 
self.convertFromDB(row[3], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[4], 'str', 'varchar(255)') + label = self.convertFromDB(row[5], 'str', 'varchar(4095)') + default = self.convertFromDB(row[6], 'str', 'varchar(4095)') + values = self.convertFromDB(row[7], 'str', 'mediumtext') + entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)') + portSpec = self.convertFromDB(row[9], 'long', 'int') + entity_id = self.convertFromDB(row[10], 'long', 'int') + entity_type = self.convertFromDB(row[11], 'str', 'char(16)') + + portSpecItem = DBPortSpecItem(pos=pos, + module=module, + package=package, + namespace=namespace, + label=label, + default=default, + values=values, + entry_type=entry_type, + id=id) + portSpecItem.db_portSpec = portSpec + portSpecItem.db_entity_id = entity_id + portSpecItem.db_entity_type = entity_type + portSpecItem.is_dirty = False + res[('portSpecItem', id)] = portSpecItem + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'port_spec_item' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + pos = self.convertFromDB(row[1], 'long', 'int') + module = self.convertFromDB(row[2], 'str', 'varchar(255)') + package = self.convertFromDB(row[3], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[4], 'str', 'varchar(255)') + label = self.convertFromDB(row[5], 'str', 'varchar(4095)') + default = self.convertFromDB(row[6], 'str', 'varchar(4095)') + values = self.convertFromDB(row[7], 'str', 'mediumtext') + entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)') + portSpec = self.convertFromDB(row[9], 'long', 'int') + entity_id = self.convertFromDB(row[10], 
'long', 'int') + entity_type = self.convertFromDB(row[11], 'str', 'char(16)') + + portSpecItem = DBPortSpecItem(pos=pos, + module=module, + package=package, + namespace=namespace, + label=label, + default=default, + values=values, + entry_type=entry_type, + id=id) + portSpecItem.db_portSpec = portSpec + portSpecItem.db_entity_id = entity_id + portSpecItem.db_entity_type = entity_type + portSpecItem.is_dirty = False + res[('portSpecItem', id)] = portSpecItem + return res + + def from_sql_fast(self, obj, all_objects): + if ('portSpec', obj.db_portSpec) in all_objects: + p = all_objects[('portSpec', obj.db_portSpec)] + p.db_add_portSpecItem(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'port_spec_item' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_pos') and obj.db_pos is not None: + columnMap['pos'] = \ + self.convertToDB(obj.db_pos, 'long', 'int') + if hasattr(obj, 'db_module') and obj.db_module is not None: + columnMap['module'] = \ + self.convertToDB(obj.db_module, 'str', 'varchar(255)') + if hasattr(obj, 'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_label') and obj.db_label is not None: + columnMap['label'] = \ + self.convertToDB(obj.db_label, 'str', 'varchar(4095)') + if hasattr(obj, 'db_default') and 
obj.db_default is not None: + columnMap['_default'] = \ + self.convertToDB(obj.db_default, 'str', 'varchar(4095)') + if hasattr(obj, 'db_values') and obj.db_values is not None: + columnMap['_values'] = \ + self.convertToDB(obj.db_values, 'str', 'mediumtext') + if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None: + columnMap['entry_type'] = \ + self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_portSpec, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'port_spec_item' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_pos') and obj.db_pos is not None: + columnMap['pos'] = \ + self.convertToDB(obj.db_pos, 'long', 'int') + if hasattr(obj, 'db_module') and obj.db_module is not None: + columnMap['module'] = \ + self.convertToDB(obj.db_module, 'str', 'varchar(255)') + if hasattr(obj, 
'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_label') and obj.db_label is not None: + columnMap['label'] = \ + self.convertToDB(obj.db_label, 'str', 'varchar(4095)') + if hasattr(obj, 'db_default') and obj.db_default is not None: + columnMap['_default'] = \ + self.convertToDB(obj.db_default, 'str', 'varchar(4095)') + if hasattr(obj, 'db_values') and obj.db_values is not None: + columnMap['_values'] = \ + self.convertToDB(obj.db_values, 'str', 'mediumtext') + if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None: + columnMap['entry_type'] = \ + self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_portSpec, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'port_spec_item' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + 
+class DBMachineSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'machine' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type'] + table = 'machine' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + os = self.convertFromDB(row[2], 'str', 'varchar(255)') + architecture = self.convertFromDB(row[3], 'str', 'varchar(255)') + processor = self.convertFromDB(row[4], 'str', 'varchar(255)') + ram = self.convertFromDB(row[5], 'int', 'bigint') + vistrailId = self.convertFromDB(row[6], 'long', 'int') + workflow_exec = self.convertFromDB(row[7], 'long', 'int') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + + machine = DBMachine(name=name, + os=os, + architecture=architecture, + processor=processor, + ram=ram, + id=id) + machine.db_vistrailId = vistrailId + machine.db_workflow_exec = workflow_exec + machine.db_entity_id = entity_id + machine.db_entity_type = entity_type + machine.is_dirty = False + res[('machine', id)] = machine + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type'] + table = 'machine' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + os 
= self.convertFromDB(row[2], 'str', 'varchar(255)') + architecture = self.convertFromDB(row[3], 'str', 'varchar(255)') + processor = self.convertFromDB(row[4], 'str', 'varchar(255)') + ram = self.convertFromDB(row[5], 'int', 'bigint') + vistrailId = self.convertFromDB(row[6], 'long', 'int') + workflow_exec = self.convertFromDB(row[7], 'long', 'int') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + + machine = DBMachine(name=name, + os=os, + architecture=architecture, + processor=processor, + ram=ram, + id=id) + machine.db_vistrailId = vistrailId + machine.db_workflow_exec = workflow_exec + machine.db_entity_id = entity_id + machine.db_entity_type = entity_type + machine.is_dirty = False + res[('machine', id)] = machine + return res + + def from_sql_fast(self, obj, all_objects): + if ('workflow_exec', obj.db_workflow_exec) in all_objects: + p = all_objects[('workflow_exec', obj.db_workflow_exec)] + p.db_add_machine(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type'] + table = 'machine' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_os') and obj.db_os is not None: + columnMap['os'] = \ + self.convertToDB(obj.db_os, 'str', 'varchar(255)') + if hasattr(obj, 'db_architecture') and obj.db_architecture is not None: + columnMap['architecture'] = \ + self.convertToDB(obj.db_architecture, 'str', 'varchar(255)') + if 
hasattr(obj, 'db_processor') and obj.db_processor is not None: + columnMap['processor'] = \ + self.convertToDB(obj.db_processor, 'str', 'varchar(255)') + if hasattr(obj, 'db_ram') and obj.db_ram is not None: + columnMap['ram'] = \ + self.convertToDB(obj.db_ram, 'int', 'bigint') + if hasattr(obj, 'db_vistrailId') and obj.db_vistrailId is not None: + columnMap['vt_id'] = \ + self.convertToDB(obj.db_vistrailId, 'long', 'int') + if hasattr(obj, 'db_workflow_exec') and obj.db_workflow_exec is not None: + columnMap['log_id'] = \ + self.convertToDB(obj.db_workflow_exec, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type'] + table = 'machine' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_os') and obj.db_os is not None: + columnMap['os'] = \ + self.convertToDB(obj.db_os, 'str', 'varchar(255)') + if hasattr(obj, 'db_architecture') and 
obj.db_architecture is not None:
            columnMap['architecture'] = \
                self.convertToDB(obj.db_architecture, 'str', 'varchar(255)')
        if hasattr(obj, 'db_processor') and obj.db_processor is not None:
            columnMap['processor'] = \
                self.convertToDB(obj.db_processor, 'str', 'varchar(255)')
        if hasattr(obj, 'db_ram') and obj.db_ram is not None:
            columnMap['ram'] = \
                self.convertToDB(obj.db_ram, 'int', 'bigint')
        if hasattr(obj, 'db_vistrailId') and obj.db_vistrailId is not None:
            columnMap['vt_id'] = \
                self.convertToDB(obj.db_vistrailId, 'long', 'int')
        if hasattr(obj, 'db_workflow_exec') and obj.db_workflow_exec is not None:
            columnMap['log_id'] = \
                self.convertToDB(obj.db_workflow_exec, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        # Build (but do not execute) the INSERT or UPDATE statement.
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert fixup (e.g. autogenerated keys) needed for this table.
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # No child objects to re-parent before saving.
        pass

    def delete_sql_column(self, db, obj, global_props):
        # Delete this object's row, scoped by the global entity properties.
        table = 'machine'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBAddSQLDAOBase(SQLDAO):
    """SQL persistence for DBAdd change operations (table 'add_tbl').

    NOTE(review): this looks like machine-generated boilerplate; prefer
    regenerating over hand-editing -- TODO confirm against the generator.
    """

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'add_tbl'

    def getDao(self, dao):
        # Look up a sibling DAO by key.
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        # SELECT all rows (optionally locked) and build DBAdd objects,
        # keyed by ('add', id).
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            what = self.convertFromDB(row[1], 'str', 'varchar(255)')
            objectId = self.convertFromDB(row[2], 'long', 'int')
            parentObjId = self.convertFromDB(row[3], 'long', 'int')
            parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
            action = self.convertFromDB(row[5], 'long', 'int')
            entity_id = self.convertFromDB(row[6], 'long', 'int')
            entity_type = self.convertFromDB(row[7], 'str', 'char(16)')

            add = DBAdd(what=what,
                        objectId=objectId,
                        parentObjId=parentObjId,
                        parentObjType=parentObjType,
                        id=id)
            add.db_action = action
            add.db_entity_id = entity_id
            add.db_entity_type = entity_type
            add.is_dirty = False
            res[('add', id)] = add
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Build (without executing) the SELECT used by get_sql_columns.
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        # Convert already-fetched rows into DBAdd objects (no DB access).
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            what = self.convertFromDB(row[1], 'str', 'varchar(255)')
            objectId = self.convertFromDB(row[2], 'long', 'int')
            parentObjId = self.convertFromDB(row[3], 'long', 'int')
            parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
            action = self.convertFromDB(row[5], 'long', 'int')
            entity_id = self.convertFromDB(row[6], 'long', 'int')
            entity_type = self.convertFromDB(row[7], 'str', 'char(16)')

            add = DBAdd(what=what,
                        objectId=objectId,
                        parentObjId=parentObjId,
                        parentObjType=parentObjType,
                        id=id)
            add.db_action = action
            add.db_entity_id = entity_id
            add.db_entity_type = entity_type
            add.is_dirty = False
            res[('add', id)] = add
        return res

    def from_sql_fast(self, obj, all_objects):
        # Attach this operation to its parent action, if loaded.
        if ('action', obj.db_action) in all_objects:
            p = all_objects[('action', obj.db_action)]
            p.db_add_operation(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        # INSERT/UPDATE this object's row and execute immediately.
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_what') and obj.db_what is not None:
            columnMap['what'] = \
                self.convertToDB(obj.db_what, 'str', 'varchar(255)')
        if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
            columnMap['object_id'] = \
                self.convertToDB(obj.db_objectId, 'long', 'int')
        if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
            columnMap['par_obj_id'] = \
                self.convertToDB(obj.db_parentObjId, 'long', 'int')
        if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
            columnMap['par_obj_type'] = \
                self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
        if hasattr(obj, 'db_action') and obj.db_action is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        # Same as set_sql_columns, but returns the command (or None).
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_what') and obj.db_what is not None:
            columnMap['what'] = \
                self.convertToDB(obj.db_what, 'str', 'varchar(255)')
        if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
            columnMap['object_id'] = \
                self.convertToDB(obj.db_objectId, 'long', 'int')
        if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
            columnMap['par_obj_id'] = \
                self.convertToDB(obj.db_parentObjId, 'long', 'int')
        if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
            columnMap['par_obj_type'] = \
                self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
        if hasattr(obj, 'db_action') and obj.db_action is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert fixup needed for this table.
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # Re-parent the payload child (the added object's data) so it is
        # saved pointing back at this add operation.
        if obj.db_data is not None:
            child = obj.db_data
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        # Delete this object's row, scoped by the global entity properties.
        table = 'add_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBOtherSQLDAOBase(SQLDAO):
    """SQL persistence for DBOther key/value entries (table 'other')."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'other'

    def getDao(self, dao):
        # Look up a sibling DAO by key.
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        # SELECT all rows and build DBOther objects keyed by ('other', id).
        columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'other'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')

            other = DBOther(key=key,
                            value=value,
                            id=id)
            other.db_parentType = parentType
            other.db_entity_id = entity_id
            other.db_entity_type = entity_type
            other.db_parent = parent
            other.is_dirty = False
            res[('other', id)] = other
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Build (without executing) the SELECT used by get_sql_columns.
        columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'other'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        # Convert already-fetched rows into DBOther objects (no DB access).
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')

            # Rebuild the in-memory object and mark it clean.
            other = DBOther(key=key,
                            value=value,
                            id=id)
            other.db_parentType = parentType
            other.db_entity_id = entity_id
            other.db_entity_type = entity_type
            other.db_parent = parent
            other.is_dirty = False
            res[('other', id)] = other
        return res

    def from_sql_fast(self, obj, all_objects):
        # Attach to the parent recorded in parent_type: a workflow, or the
        # add/change operation that introduced this entry.
        if obj.db_parentType == 'workflow':
            p = all_objects[('workflow', obj.db_parent)]
            p.db_add_other(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        # INSERT/UPDATE this object's row and execute immediately.
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'other'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        # The key column is named 'okey' ('key' is reserved in SQL).
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['okey'] = \
                self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        # Same as set_sql_columns, but returns the command (or None).
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'other'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['okey'] = \
                self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        # Build (but do not execute) the INSERT or UPDATE statement.
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert fixup needed for this table.
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # No child objects to re-parent before saving.
        pass

    def delete_sql_column(self, db, obj, global_props):
        # Delete this object's row, scoped by the global entity properties.
        table = 'other'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBLocationSQLDAOBase(SQLDAO):
    """SQL persistence for DBLocation x/y positions (table 'location')."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'location'

    def getDao(self, dao):
        # Look up a sibling DAO by key.
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        # SELECT all rows and build DBLocation objects keyed by
        # ('location', id).
        columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'location'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
            y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')

            location = DBLocation(x=x,
                                  y=y,
                                  id=id)
            location.db_parentType = parentType
            location.db_entity_id = entity_id
            location.db_entity_type = entity_type
            location.db_parent = parent
            location.is_dirty = False
            res[('location', id)] = location
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Build (without executing) the SELECT used by get_sql_columns.
        columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'location'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        # Convert already-fetched rows into DBLocation objects (no DB access).
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
            y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')

            location = DBLocation(x=x,
                                  y=y,
                                  id=id)
            location.db_parentType = parentType
            location.db_entity_id = entity_id
            location.db_entity_type = entity_type
            location.db_parent = parent
            location.is_dirty = False
            res[('location', id)] = location
        return res

    def from_sql_fast(self, obj, all_objects):
        # Attach to the owner recorded in parent_type: the positioned
        # module/abstraction/group, or the add/change op that set it.
        if obj.db_parentType == 'module':
            p = all_objects[('module', obj.db_parent)]
            p.db_add_location(obj)
        elif obj.db_parentType == 'abstraction':
            p = all_objects[('abstraction', obj.db_parent)]
            p.db_add_location(obj)
        elif obj.db_parentType == 'group':
            p = all_objects[('group', obj.db_parent)]
            p.db_add_location(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        # INSERT/UPDATE this object's row and execute immediately.
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'location'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_x') and obj.db_x is not None:
            columnMap['x'] = \
                self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
        if hasattr(obj, 'db_y') and obj.db_y is not None:
            columnMap['y'] = \
                self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        # Same as set_sql_columns, but returns the command (or None).
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'location'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_x') and obj.db_x is not None:
            columnMap['x'] = \
                self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
        if hasattr(obj, 'db_y') and obj.db_y is not None:
            columnMap['y'] = \
                self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert fixup needed for this table.
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # No child objects to re-parent before saving.
        pass

    def delete_sql_column(self, db, obj, global_props):
        # Delete this object's row, scoped by the global entity properties.
        table = 'location'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBPEParameterSQLDAOBase(SQLDAO):
    """SQL persistence for parameter-exploration parameters
    (table 'pe_parameter')."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'pe_parameter'

    def getDao(self, dao):
        # Look up a sibling DAO by key.
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        # SELECT all rows and build DBPEParameter objects keyed by
        # ('pe_parameter', id).
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
            value = self.convertFromDB(row[3], 'str', 'mediumtext')
            dimension = self.convertFromDB(row[4], 'long', 'int')
            parentType = self.convertFromDB(row[5], 'str', 'char(32)')
            pe_function = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')

            pe_parameter = DBPEParameter(pos=pos,
                                         interpolator=interpolator,
                                         value=value,
                                         dimension=dimension,
                                         id=id)
            pe_parameter.db_parentType = parentType
            pe_parameter.db_pe_function = pe_function
            pe_parameter.db_entity_id = entity_id
            pe_parameter.db_entity_type = entity_type
            pe_parameter.is_dirty = False
            res[('pe_parameter', id)] = pe_parameter
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Build (without executing) the SELECT used by get_sql_columns.
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        # Convert already-fetched rows into DBPEParameter objects
        # (no DB access).
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
            value = self.convertFromDB(row[3], 'str', 'mediumtext')
            dimension = self.convertFromDB(row[4], 'long', 'int')
            parentType = self.convertFromDB(row[5], 'str', 'char(32)')
            pe_function = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')

            pe_parameter = DBPEParameter(pos=pos,
                                         interpolator=interpolator,
                                         value=value,
                                         dimension=dimension,
                                         id=id)
            pe_parameter.db_parentType = parentType
            pe_parameter.db_pe_function = pe_function
            pe_parameter.db_entity_id = entity_id
            pe_parameter.db_entity_type = entity_type
            pe_parameter.is_dirty = False
            res[('pe_parameter', id)] = pe_parameter
        return res

    def from_sql_fast(self, obj, all_objects):
        # Attach this parameter to its parent pe_function, if loaded.
        if ('pe_function', obj.db_pe_function) in all_objects:
            p = all_objects[('pe_function', obj.db_pe_function)]
            p.db_add_parameter(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        # INSERT/UPDATE this object's row and execute immediately.
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
            columnMap['interpolator'] = \
                self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'mediumtext')
        if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
            columnMap['dimension'] = \
                self.convertToDB(obj.db_dimension, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        # parent_id holds the owning pe_function's id.
        if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_pe_function, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        # Same as set_sql_columns, but returns the command (or None).
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
            columnMap['interpolator'] = \
                self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'mediumtext')
        if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
            columnMap['dimension'] = \
                self.convertToDB(obj.db_dimension, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_pe_function, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert fixup needed for this table.
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # No child objects to re-parent before saving.
        pass

    def delete_sql_column(self, db, obj, global_props):
        # Delete this object's row, scoped by the global entity properties.
        table = 'pe_parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBParameterSQLDAOBase(SQLDAO):
    """SQL persistence for DBParameter function parameters
    (table 'parameter')."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'parameter'

    def getDao(self, dao):
        # Look up a sibling DAO by key.
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        # SELECT all rows and build DBParameter objects keyed by
        # ('parameter', id).
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            type = self.convertFromDB(row[3], 'str', 'varchar(255)')
            val = self.convertFromDB(row[4], 'str', 'mediumtext')
            alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')

            parameter = DBParameter(pos=pos,
                                    name=name,
                                    type=type,
                                    val=val,
                                    alias=alias,
                                    id=id)
            parameter.db_parentType = parentType
            parameter.db_entity_id = entity_id
            parameter.db_entity_type = entity_type
            parameter.db_parent = parent
            parameter.is_dirty = False
            res[('parameter', id)] = parameter
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Build (without executing) the SELECT used by get_sql_columns.
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        # Convert already-fetched rows into DBParameter objects
        # (no DB access).
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            type = self.convertFromDB(row[3], 'str', 'varchar(255)')
            val = self.convertFromDB(row[4], 'str', 'mediumtext')
            alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')

            parameter = DBParameter(pos=pos,
                                    name=name,
                                    type=type,
                                    val=val,
                                    alias=alias,
                                    id=id)
            parameter.db_parentType = parentType
            parameter.db_entity_id = entity_id
            parameter.db_entity_type = entity_type
            parameter.db_parent = parent
            parameter.is_dirty = False
            res[('parameter', id)] = parameter
        return res

    def from_sql_fast(self, obj, all_objects):
        # Attach to the parent recorded in parent_type: the owning
        # function, or the add/change operation that introduced it.
        if obj.db_parentType == 'function':
            p = all_objects[('function', obj.db_parent)]
            p.db_add_parameter(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        # INSERT/UPDATE this object's row and execute immediately.
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_val') and obj.db_val is not None:
            columnMap['val'] = \
                self.convertToDB(obj.db_val, 'str', 'mediumtext')
        if hasattr(obj, 'db_alias') and obj.db_alias is not None:
            columnMap['alias'] = \
                self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        # Same as set_sql_columns, but returns the command (or None).
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_val') and obj.db_val is not None:
            columnMap['val'] = \
                self.convertToDB(obj.db_val, 'str', 'mediumtext')
        if hasattr(obj, 'db_alias') and obj.db_alias is not None:
            columnMap['alias'] = \
                self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)

        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand

    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert fixup needed for this table.
        pass

    def to_sql_fast(self, obj, do_copy=True):
        # No child objects to re-parent before saving.
        pass

    def delete_sql_column(self, db, obj, global_props):
        # Delete this object's row, scoped by the global entity properties.
        table = 'parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)

class DBPluginDataSQLDAOBase(SQLDAO):
    """SQL persistence for DBPluginData blobs (table 'plugin_data')."""

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'plugin_data'

    def getDao(self, dao):
        # Look up a sibling DAO by key.
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props,lock=False):
        # SELECT all rows and build DBPluginData objects keyed by
        # ('plugin_data', id).
        columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'plugin_data'
        whereMap = global_props
        orderBy = 'id'

        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            # NOTE: rebinding 'data' here shadows the fetched row list; the
            # loop is unaffected because iteration holds its own iterator.
            data = self.convertFromDB(row[1], 'str', 'varchar(8191)')
            parentType = self.convertFromDB(row[2], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[3], 'long', 'int')
            entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
            parent = self.convertFromDB(row[5], 'long', 'long')

            plugin_data = DBPluginData(data=data,
                                       id=id)
            plugin_data.db_parentType = parentType
            plugin_data.db_entity_id = entity_id
            plugin_data.db_entity_type = entity_type
            plugin_data.db_parent = parent
            plugin_data.is_dirty = False
            res[('plugin_data', id)] = plugin_data
        return res

    def get_sql_select(self, db, global_props,lock=False):
        # Build (without executing) the SELECT used by get_sql_columns.
        columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'plugin_data'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)

    def process_sql_columns(self, data, global_props):
        # Convert already-fetched rows into DBPluginData objects
        # (no DB access).
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            data = self.convertFromDB(row[1], 'str', 'varchar(8191)')
            parentType = self.convertFromDB(row[2], 'str', 'char(32)')
+ entity_id = self.convertFromDB(row[3], 'long', 'int') + entity_type = self.convertFromDB(row[4], 'str', 'char(16)') + parent = self.convertFromDB(row[5], 'long', 'long') + + plugin_data = DBPluginData(data=data, + id=id) + plugin_data.db_parentType = parentType + plugin_data.db_entity_id = entity_id + plugin_data.db_entity_type = entity_type + plugin_data.db_parent = parent + plugin_data.is_dirty = False + res[('plugin_data', id)] = plugin_data + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'workflow': + p = all_objects[('workflow', obj.db_parent)] + p.db_add_plugin_data(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'plugin_data' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_data') and obj.db_data is not None: + columnMap['data'] = \ + self.convertToDB(obj.db_data, 'str', 'varchar(8191)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent 
is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'plugin_data' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_data') and obj.db_data is not None: + columnMap['data'] = \ + self.convertToDB(obj.db_data, 'str', 'varchar(8191)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 
'plugin_data' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBFunctionSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'function' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'function' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + pos = self.convertFromDB(row[1], 'long', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + function = DBFunction(pos=pos, + name=name, + id=id) + function.db_parentType = parentType + function.db_entity_id = entity_id + function.db_entity_type = entity_type + function.db_parent = parent + function.is_dirty = False + res[('function', id)] = function + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'function' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + pos = self.convertFromDB(row[1], 'long', 'int') + name = self.convertFromDB(row[2], 'str', 
'varchar(255)') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + function = DBFunction(pos=pos, + name=name, + id=id) + function.db_parentType = parentType + function.db_entity_id = entity_id + function.db_entity_type = entity_type + function.db_parent = parent + function.is_dirty = False + res[('function', id)] = function + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'module': + p = all_objects[('module', obj.db_parent)] + p.db_add_function(obj) + elif obj.db_parentType == 'abstraction': + p = all_objects[('abstraction', obj.db_parent)] + p.db_add_function(obj) + elif obj.db_parentType == 'group': + p = all_objects[('group', obj.db_parent)] + p.db_add_function(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'function' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_pos') and obj.db_pos is not None: + columnMap['pos'] = \ + self.convertToDB(obj.db_pos, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + 
self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'function' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_pos') and obj.db_pos is not None: + columnMap['pos'] = \ + self.convertToDB(obj.db_pos, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + 
if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_parameters: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'function' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBActionAnnotationSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'action_annotation' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'action_annotation' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + key = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'varchar(8191)') + action_id = self.convertFromDB(row[3], 'long', 'int') + date = self.convertFromDB(row[4], 'datetime', 'datetime') + user = self.convertFromDB(row[5], 'str', 'varchar(255)') + vistrail = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + 
actionAnnotation = DBActionAnnotation(key=key, + value=value, + action_id=action_id, + date=date, + user=user, + id=id) + actionAnnotation.db_vistrail = vistrail + actionAnnotation.db_entity_id = entity_id + actionAnnotation.db_entity_type = entity_type + actionAnnotation.is_dirty = False + res[('actionAnnotation', id)] = actionAnnotation + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'action_annotation' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + key = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'varchar(8191)') + action_id = self.convertFromDB(row[3], 'long', 'int') + date = self.convertFromDB(row[4], 'datetime', 'datetime') + user = self.convertFromDB(row[5], 'str', 'varchar(255)') + vistrail = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + actionAnnotation = DBActionAnnotation(key=key, + value=value, + action_id=action_id, + date=date, + user=user, + id=id) + actionAnnotation.db_vistrail = vistrail + actionAnnotation.db_entity_id = entity_id + actionAnnotation.db_entity_type = entity_type + actionAnnotation.is_dirty = False + res[('actionAnnotation', id)] = actionAnnotation + return res + + def from_sql_fast(self, obj, all_objects): + if ('vistrail', obj.db_vistrail) in all_objects: + p = all_objects[('vistrail', obj.db_vistrail)] + p.db_add_actionAnnotation(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 
'entity_id', 'entity_type'] + table = 'action_annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_key') and obj.db_key is not None: + columnMap['akey'] = \ + self.convertToDB(obj.db_key, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'varchar(8191)') + if hasattr(obj, 'db_action_id') and obj.db_action_id is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action_id, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 
'action_annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_key') and obj.db_key is not None: + columnMap['akey'] = \ + self.convertToDB(obj.db_key, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'varchar(8191)') + if hasattr(obj, 'db_action_id') and obj.db_action_id is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action_id, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'action_annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 
'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBAbstractionSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'abstraction' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'abstraction' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + cache = self.convertFromDB(row[1], 'int', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[3], 'str', 'varchar(255)') + package = self.convertFromDB(row[4], 'str', 'varchar(511)') + version = self.convertFromDB(row[5], 'str', 'varchar(255)') + internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)') + parentType = self.convertFromDB(row[7], 'str', 'char(32)') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + parent = self.convertFromDB(row[10], 'long', 'long') + + abstraction = DBAbstraction(cache=cache, + name=name, + namespace=namespace, + package=package, + version=version, + internal_version=internal_version, + id=id) + abstraction.db_parentType = parentType + abstraction.db_entity_id = entity_id + abstraction.db_entity_type = entity_type + abstraction.db_parent = parent + abstraction.is_dirty = False + res[('abstraction', id)] = abstraction + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 
'parent_id'] + table = 'abstraction' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + cache = self.convertFromDB(row[1], 'int', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + namespace = self.convertFromDB(row[3], 'str', 'varchar(255)') + package = self.convertFromDB(row[4], 'str', 'varchar(511)') + version = self.convertFromDB(row[5], 'str', 'varchar(255)') + internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)') + parentType = self.convertFromDB(row[7], 'str', 'char(32)') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + parent = self.convertFromDB(row[10], 'long', 'long') + + abstraction = DBAbstraction(cache=cache, + name=name, + namespace=namespace, + package=package, + version=version, + internal_version=internal_version, + id=id) + abstraction.db_parentType = parentType + abstraction.db_entity_id = entity_id + abstraction.db_entity_type = entity_type + abstraction.db_parent = parent + abstraction.is_dirty = False + res[('abstraction', id)] = abstraction + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'workflow': + p = all_objects[('workflow', obj.db_parent)] + p.db_add_module(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'abstraction' + whereMap = {} + whereMap.update(global_props) + if 
obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_cache') and obj.db_cache is not None: + columnMap['cache'] = \ + self.convertToDB(obj.db_cache, 'int', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(511)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_internal_version') and obj.db_internal_version is not None: + columnMap['internal_version'] = \ + self.convertToDB(obj.db_internal_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + 
def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'abstraction' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_cache') and obj.db_cache is not None: + columnMap['cache'] = \ + self.convertToDB(obj.db_cache, 'int', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_namespace') and obj.db_namespace is not None: + columnMap['namespace'] = \ + self.convertToDB(obj.db_namespace, 'str', 'varchar(255)') + if hasattr(obj, 'db_package') and obj.db_package is not None: + columnMap['package'] = \ + self.convertToDB(obj.db_package, 'str', 'varchar(511)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_internal_version') and obj.db_internal_version is not None: + columnMap['internal_version'] = \ + self.convertToDB(obj.db_internal_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and 
obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + if obj.db_location is not None: + child = obj.db_location + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_functions: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_controlParameters: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'abstraction' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBMashupAliasSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'mashup_alias' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_alias' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + parent = self.convertFromDB(row[2], 'long', 'int') + entity_id = self.convertFromDB(row[3], 'long', 'int') + entity_type = self.convertFromDB(row[4], 'str', 'char(16)') + + 
mashup_alias = DBMashupAlias(name=name, + id=id) + mashup_alias.db_parent = parent + mashup_alias.db_entity_id = entity_id + mashup_alias.db_entity_type = entity_type + mashup_alias.is_dirty = False + res[('mashup_alias', id)] = mashup_alias + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_alias' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + parent = self.convertFromDB(row[2], 'long', 'int') + entity_id = self.convertFromDB(row[3], 'long', 'int') + entity_type = self.convertFromDB(row[4], 'str', 'char(16)') + + mashup_alias = DBMashupAlias(name=name, + id=id) + mashup_alias.db_parent = parent + mashup_alias.db_entity_id = entity_id + mashup_alias.db_entity_type = entity_type + mashup_alias.is_dirty = False + res[('mashup_alias', id)] = mashup_alias + return res + + def from_sql_fast(self, obj, all_objects): + if ('mashup', obj.db_parent) in all_objects: + p = all_objects[('mashup', obj.db_parent)] + p.db_add_alias(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_alias' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_parent') and obj.db_parent is 
not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_alias' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass 
+ + def to_sql_fast(self, obj, do_copy=True): + if obj.db_component is not None: + child = obj.db_component + child.db_mashup_alias = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'mashup_alias' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBWorkflowSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'workflow' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id'] + table = 'workflow' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + entity_id = self.convertFromDB(row[1], 'long', 'int') + entity_type = self.convertFromDB(row[2], 'str', 'char(16)') + global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)') + name = self.convertFromDB(row[3], 'str', 'varchar(255)') + version = self.convertFromDB(row[4], 'str', 'char(16)') + last_modified = self.convertFromDB(row[5], 'datetime', 'datetime') + vistrail_id = self.convertFromDB(row[6], 'long', 'int') + group = self.convertFromDB(row[7], 'long', 'int') + + workflow = DBWorkflow(entity_type=entity_type, + name=name, + version=version, + last_modified=last_modified, + vistrail_id=vistrail_id, + id=id) + workflow.db_entity_id = entity_id + workflow.db_group = group + workflow.is_dirty = False + res[('workflow', id)] = workflow + return res + + def get_sql_select(self, db, global_props,lock=False): + 
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id'] + table = 'workflow' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + entity_id = self.convertFromDB(row[1], 'long', 'int') + entity_type = self.convertFromDB(row[2], 'str', 'char(16)') + global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)') + name = self.convertFromDB(row[3], 'str', 'varchar(255)') + version = self.convertFromDB(row[4], 'str', 'char(16)') + last_modified = self.convertFromDB(row[5], 'datetime', 'datetime') + vistrail_id = self.convertFromDB(row[6], 'long', 'int') + group = self.convertFromDB(row[7], 'long', 'int') + + workflow = DBWorkflow(entity_type=entity_type, + name=name, + version=version, + last_modified=last_modified, + vistrail_id=vistrail_id, + id=id) + workflow.db_entity_id = entity_id + workflow.db_group = group + workflow.is_dirty = False + res[('workflow', id)] = workflow + return res + + def from_sql_fast(self, obj, all_objects): + if ('group', obj.db_group) in all_objects: + p = all_objects[('group', obj.db_group)] + p.db_add_workflow(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id'] + table = 'workflow' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: 
+ columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None: + columnMap['vistrail_id'] = \ + self.convertToDB(obj.db_vistrail_id, 'long', 'int') + if hasattr(obj, 'db_group') and obj.db_group is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_group, 'long', 'int') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id'] + table = 'workflow' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + 
whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None: + columnMap['vistrail_id'] = \ + self.convertToDB(obj.db_vistrail_id, 'long', 'int') + if hasattr(obj, 'db_group') and obj.db_group is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_group, 'long', 'int') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_connections: + child.db_parentType = obj.vtType + 
child.db_parent = obj.db_id + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_plugin_datas: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_others: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_modules: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'workflow' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBMashupActionSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'mashup_action' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + prevId = self.convertFromDB(row[1], 'long', 'int') + date = self.convertFromDB(row[2], 'datetime', 'datetime') + user = self.convertFromDB(row[3], 'str', 'varchar(255)') + mashuptrail = self.convertFromDB(row[4], 'long', 'int') + entity_id = self.convertFromDB(row[5], 'long', 'int') + entity_type = self.convertFromDB(row[6], 'str', 'char(16)') + + mashup_action = DBMashupAction(prevId=prevId, + date=date, + user=user, + id=id) + mashup_action.db_mashuptrail = mashuptrail + mashup_action.db_entity_id = entity_id + mashup_action.db_entity_type = entity_type + mashup_action.is_dirty = False + res[('mashup_action', id)] = mashup_action + 
return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + prevId = self.convertFromDB(row[1], 'long', 'int') + date = self.convertFromDB(row[2], 'datetime', 'datetime') + user = self.convertFromDB(row[3], 'str', 'varchar(255)') + mashuptrail = self.convertFromDB(row[4], 'long', 'int') + entity_id = self.convertFromDB(row[5], 'long', 'int') + entity_type = self.convertFromDB(row[6], 'str', 'char(16)') + + mashup_action = DBMashupAction(prevId=prevId, + date=date, + user=user, + id=id) + mashup_action.db_mashuptrail = mashuptrail + mashup_action.db_entity_id = entity_id + mashup_action.db_entity_type = entity_type + mashup_action.is_dirty = False + res[('mashup_action', id)] = mashup_action + return res + + def from_sql_fast(self, obj, all_objects): + if ('mashuptrail', obj.db_mashuptrail) in all_objects: + p = all_objects[('mashuptrail', obj.db_mashuptrail)] + p.db_add_action(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_prevId') and obj.db_prevId is not None: + columnMap['prev_id'] = \ + self.convertToDB(obj.db_prevId, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + 
columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_mashuptrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_prevId') and obj.db_prevId is not None: + columnMap['prev_id'] = \ + self.convertToDB(obj.db_prevId, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_mashuptrail, 
'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + if obj.db_mashup is not None: + child = obj.db_mashup + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'mashup_action' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBMashuptrailSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'mashuptrail' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type'] + table = 'mashuptrail' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'char(36)') + version = self.convertFromDB(row[2], 'str', 'char(16)') + vtVersion = self.convertFromDB(row[3], 'long', 'int') + last_modified = self.convertFromDB(row[4], 'datetime', 'datetime') + entity_type = 
self.convertFromDB(row[5], 'str', 'char(16)') + + mashuptrail = DBMashuptrail(name=name, + version=version, + vtVersion=vtVersion, + last_modified=last_modified, + id=id) + mashuptrail.db_entity_type = entity_type + mashuptrail.is_dirty = False + res[('mashuptrail', id)] = mashuptrail + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type'] + table = 'mashuptrail' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'char(36)') + version = self.convertFromDB(row[2], 'str', 'char(16)') + vtVersion = self.convertFromDB(row[3], 'long', 'int') + last_modified = self.convertFromDB(row[4], 'datetime', 'datetime') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + + mashuptrail = DBMashuptrail(name=name, + version=version, + vtVersion=vtVersion, + last_modified=last_modified, + id=id) + mashuptrail.db_entity_type = entity_type + mashuptrail.is_dirty = False + res[('mashuptrail', id)] = mashuptrail + return res + + def from_sql_fast(self, obj, all_objects): + pass + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type'] + table = 'mashuptrail' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + 
self.convertToDB(obj.db_name, 'str', 'char(36)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_vtVersion') and obj.db_vtVersion is not None: + columnMap['vt_version'] = \ + self.convertToDB(obj.db_vtVersion, 'long', 'int') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type'] + table = 'mashuptrail' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'char(36)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_vtVersion') and obj.db_vtVersion is 
not None: + columnMap['vt_version'] = \ + self.convertToDB(obj.db_vtVersion, 'long', 'int') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_actions: + child.db_mashuptrail = obj.db_id + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_actionAnnotations: + child.db_mashuptrail = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'mashuptrail' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBRegistrySQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'registry' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified'] + table = 'registry' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, 
orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + entity_type = self.convertFromDB(row[1], 'str', 'char(16)') + global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)') + version = self.convertFromDB(row[2], 'str', 'char(16)') + root_descriptor_id = self.convertFromDB(row[3], 'long', 'int') + name = self.convertFromDB(row[4], 'str', 'varchar(255)') + last_modified = self.convertFromDB(row[5], 'datetime', 'datetime') + + registry = DBRegistry(entity_type=entity_type, + version=version, + root_descriptor_id=root_descriptor_id, + name=name, + last_modified=last_modified, + id=id) + registry.is_dirty = False + res[('registry', id)] = registry + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified'] + table = 'registry' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + entity_type = self.convertFromDB(row[1], 'str', 'char(16)') + global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)') + version = self.convertFromDB(row[2], 'str', 'char(16)') + root_descriptor_id = self.convertFromDB(row[3], 'long', 'int') + name = self.convertFromDB(row[4], 'str', 'varchar(255)') + last_modified = self.convertFromDB(row[5], 'datetime', 'datetime') + + registry = DBRegistry(entity_type=entity_type, + version=version, + root_descriptor_id=root_descriptor_id, + name=name, + last_modified=last_modified, + id=id) + registry.is_dirty = False + res[('registry', id)] = registry + return res + + def from_sql_fast(self, obj, 
all_objects): + pass + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified'] + table = 'registry' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None: + columnMap['root_descriptor_id'] = \ + self.convertToDB(obj.db_root_descriptor_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + + def set_sql_command(self, 
db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified'] + table = 'registry' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None: + columnMap['root_descriptor_id'] = \ + self.convertToDB(obj.db_root_descriptor_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + pass + + def to_sql_fast(self, obj, 
do_copy=True): + for child in obj.db_packages: + child.db_registry = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'registry' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBMashupComponentSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'mashup_component' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type'] + table = 'mashup_component' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + vtid = self.convertFromDB(row[1], 'long', 'int') + vttype = self.convertFromDB(row[2], 'str', 'varchar(255)') + vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)') + vtparent_id = self.convertFromDB(row[4], 'long', 'int') + vtpos = self.convertFromDB(row[5], 'long', 'int') + vtmid = self.convertFromDB(row[6], 'long', 'int') + pos = self.convertFromDB(row[7], 'long', 'int') + type = self.convertFromDB(row[8], 'str', 'varchar(255)') + val = self.convertFromDB(row[9], 'str', 'mediumtext') + minVal = self.convertFromDB(row[10], 'str', 'varchar(255)') + maxVal = self.convertFromDB(row[11], 'str', 'varchar(255)') + stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)') + strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext') + widget = self.convertFromDB(row[14], 'str', 'varchar(255)') + seq = 
self.convertFromDB(row[15], 'int', 'int') + parent = self.convertFromDB(row[16], 'str', 'varchar(255)') + mashup_alias = self.convertFromDB(row[17], 'long', 'int') + entity_id = self.convertFromDB(row[18], 'long', 'int') + entity_type = self.convertFromDB(row[19], 'str', 'char(16)') + + mashup_component = DBMashupComponent(vtid=vtid, + vttype=vttype, + vtparent_type=vtparent_type, + vtparent_id=vtparent_id, + vtpos=vtpos, + vtmid=vtmid, + pos=pos, + type=type, + val=val, + minVal=minVal, + maxVal=maxVal, + stepSize=stepSize, + strvaluelist=strvaluelist, + widget=widget, + seq=seq, + parent=parent, + id=id) + mashup_component.db_mashup_alias = mashup_alias + mashup_component.db_entity_id = entity_id + mashup_component.db_entity_type = entity_type + mashup_component.is_dirty = False + res[('mashup_component', id)] = mashup_component + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type'] + table = 'mashup_component' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + vtid = self.convertFromDB(row[1], 'long', 'int') + vttype = self.convertFromDB(row[2], 'str', 'varchar(255)') + vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)') + vtparent_id = self.convertFromDB(row[4], 'long', 'int') + vtpos = self.convertFromDB(row[5], 'long', 'int') + vtmid = self.convertFromDB(row[6], 'long', 'int') + pos = self.convertFromDB(row[7], 'long', 'int') + type = self.convertFromDB(row[8], 'str', 'varchar(255)') + val = self.convertFromDB(row[9], 'str', 'mediumtext') + minVal = self.convertFromDB(row[10], 'str', 'varchar(255)') + maxVal = 
self.convertFromDB(row[11], 'str', 'varchar(255)') + stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)') + strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext') + widget = self.convertFromDB(row[14], 'str', 'varchar(255)') + seq = self.convertFromDB(row[15], 'int', 'int') + parent = self.convertFromDB(row[16], 'str', 'varchar(255)') + mashup_alias = self.convertFromDB(row[17], 'long', 'int') + entity_id = self.convertFromDB(row[18], 'long', 'int') + entity_type = self.convertFromDB(row[19], 'str', 'char(16)') + + mashup_component = DBMashupComponent(vtid=vtid, + vttype=vttype, + vtparent_type=vtparent_type, + vtparent_id=vtparent_id, + vtpos=vtpos, + vtmid=vtmid, + pos=pos, + type=type, + val=val, + minVal=minVal, + maxVal=maxVal, + stepSize=stepSize, + strvaluelist=strvaluelist, + widget=widget, + seq=seq, + parent=parent, + id=id) + mashup_component.db_mashup_alias = mashup_alias + mashup_component.db_entity_id = entity_id + mashup_component.db_entity_type = entity_type + mashup_component.is_dirty = False + res[('mashup_component', id)] = mashup_component + return res + + def from_sql_fast(self, obj, all_objects): + if ('mashup_alias', obj.db_mashup_alias) in all_objects: + p = all_objects[('mashup_alias', obj.db_mashup_alias)] + p.db_add_component(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type'] + table = 'mashup_component' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_vtid') and 
obj.db_vtid is not None: + columnMap['vtid'] = \ + self.convertToDB(obj.db_vtid, 'long', 'int') + if hasattr(obj, 'db_vttype') and obj.db_vttype is not None: + columnMap['vttype'] = \ + self.convertToDB(obj.db_vttype, 'str', 'varchar(255)') + if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None: + columnMap['vtparent_type'] = \ + self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)') + if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None: + columnMap['vtparent_id'] = \ + self.convertToDB(obj.db_vtparent_id, 'long', 'int') + if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None: + columnMap['vtpos'] = \ + self.convertToDB(obj.db_vtpos, 'long', 'int') + if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None: + columnMap['vtmid'] = \ + self.convertToDB(obj.db_vtmid, 'long', 'int') + if hasattr(obj, 'db_pos') and obj.db_pos is not None: + columnMap['pos'] = \ + self.convertToDB(obj.db_pos, 'long', 'int') + if hasattr(obj, 'db_type') and obj.db_type is not None: + columnMap['type'] = \ + self.convertToDB(obj.db_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_val') and obj.db_val is not None: + columnMap['val'] = \ + self.convertToDB(obj.db_val, 'str', 'mediumtext') + if hasattr(obj, 'db_minVal') and obj.db_minVal is not None: + columnMap['minVal'] = \ + self.convertToDB(obj.db_minVal, 'str', 'varchar(255)') + if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None: + columnMap['maxVal'] = \ + self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)') + if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None: + columnMap['stepSize'] = \ + self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)') + if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None: + columnMap['strvaluelist'] = \ + self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext') + if hasattr(obj, 'db_widget') and obj.db_widget is not None: + columnMap['widget'] = \ + self.convertToDB(obj.db_widget, 'str', 'varchar(255)') + if hasattr(obj, 
'db_seq') and obj.db_seq is not None: + columnMap['seq'] = \ + self.convertToDB(obj.db_seq, 'int', 'int') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent'] = \ + self.convertToDB(obj.db_parent, 'str', 'varchar(255)') + if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not None: + columnMap['alias_id'] = \ + self.convertToDB(obj.db_mashup_alias, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type'] + table = 'mashup_component' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_vtid') and obj.db_vtid is not None: + columnMap['vtid'] = \ + self.convertToDB(obj.db_vtid, 'long', 'int') + if hasattr(obj, 'db_vttype') and obj.db_vttype is not None: + columnMap['vttype'] = \ + self.convertToDB(obj.db_vttype, 'str', 'varchar(255)') + if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None: + 
columnMap['vtparent_type'] = \ + self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)') + if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None: + columnMap['vtparent_id'] = \ + self.convertToDB(obj.db_vtparent_id, 'long', 'int') + if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None: + columnMap['vtpos'] = \ + self.convertToDB(obj.db_vtpos, 'long', 'int') + if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None: + columnMap['vtmid'] = \ + self.convertToDB(obj.db_vtmid, 'long', 'int') + if hasattr(obj, 'db_pos') and obj.db_pos is not None: + columnMap['pos'] = \ + self.convertToDB(obj.db_pos, 'long', 'int') + if hasattr(obj, 'db_type') and obj.db_type is not None: + columnMap['type'] = \ + self.convertToDB(obj.db_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_val') and obj.db_val is not None: + columnMap['val'] = \ + self.convertToDB(obj.db_val, 'str', 'mediumtext') + if hasattr(obj, 'db_minVal') and obj.db_minVal is not None: + columnMap['minVal'] = \ + self.convertToDB(obj.db_minVal, 'str', 'varchar(255)') + if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None: + columnMap['maxVal'] = \ + self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)') + if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None: + columnMap['stepSize'] = \ + self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)') + if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None: + columnMap['strvaluelist'] = \ + self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext') + if hasattr(obj, 'db_widget') and obj.db_widget is not None: + columnMap['widget'] = \ + self.convertToDB(obj.db_widget, 'str', 'varchar(255)') + if hasattr(obj, 'db_seq') and obj.db_seq is not None: + columnMap['seq'] = \ + self.convertToDB(obj.db_seq, 'int', 'int') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent'] = \ + self.convertToDB(obj.db_parent, 'str', 'varchar(255)') + if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not 
None: + columnMap['alias_id'] = \ + self.convertToDB(obj.db_mashup_alias, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'mashup_component' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBAnnotationSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'annotation' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'annotation' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + key = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'mediumtext') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 
'long') + + annotation = DBAnnotation(key=key, + value=value, + id=id) + annotation.db_parentType = parentType + annotation.db_entity_id = entity_id + annotation.db_entity_type = entity_type + annotation.db_parent = parent + annotation.is_dirty = False + res[('annotation', id)] = annotation + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'annotation' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + key = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'mediumtext') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + annotation = DBAnnotation(key=key, + value=value, + id=id) + annotation.db_parentType = parentType + annotation.db_entity_id = entity_id + annotation.db_entity_type = entity_type + annotation.db_parent = parent + annotation.is_dirty = False + res[('annotation', id)] = annotation + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'vistrail': + p = all_objects[('vistrail', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'workflow': + p = all_objects[('workflow', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'module': + p = all_objects[('module', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'workflow_exec': + p = all_objects[('workflow_exec', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'module_exec': + p = all_objects[('module_exec', obj.db_parent)] + p.db_add_annotation(obj) 
+ elif obj.db_parentType == 'group_exec': + p = all_objects[('group_exec', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'action': + p = all_objects[('action', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'abstraction': + p = all_objects[('abstraction', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'mashuptrail': + p = all_objects[('mashuptrail', obj.db_parent)] + p.db_add_annotation(obj) + elif obj.db_parentType == 'group': + p = all_objects[('group', obj.db_parent)] + p.db_add_annotation(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_key') and obj.db_key is not None: + columnMap['akey'] = \ + self.convertToDB(obj.db_key, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'mediumtext') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + 
self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_key') and obj.db_key is not None: + columnMap['akey'] = \ + self.convertToDB(obj.db_key, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'mediumtext') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = 
self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBChangeSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'change_tbl' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'change_tbl' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + what = self.convertFromDB(row[1], 'str', 'varchar(255)') + oldObjId = self.convertFromDB(row[2], 'long', 'int') + newObjId = self.convertFromDB(row[3], 'long', 'int') + parentObjId = self.convertFromDB(row[4], 'long', 'int') + parentObjType = self.convertFromDB(row[5], 'str', 'char(16)') + action = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + change = DBChange(what=what, + oldObjId=oldObjId, + newObjId=newObjId, + parentObjId=parentObjId, + parentObjType=parentObjType, + id=id) + change.db_action = action + change.db_entity_id = entity_id + change.db_entity_type = entity_type + change.is_dirty = False + res[('change', id)] = change + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 
'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'change_tbl' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + what = self.convertFromDB(row[1], 'str', 'varchar(255)') + oldObjId = self.convertFromDB(row[2], 'long', 'int') + newObjId = self.convertFromDB(row[3], 'long', 'int') + parentObjId = self.convertFromDB(row[4], 'long', 'int') + parentObjType = self.convertFromDB(row[5], 'str', 'char(16)') + action = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + change = DBChange(what=what, + oldObjId=oldObjId, + newObjId=newObjId, + parentObjId=parentObjId, + parentObjType=parentObjType, + id=id) + change.db_action = action + change.db_entity_id = entity_id + change.db_entity_type = entity_type + change.is_dirty = False + res[('change', id)] = change + return res + + def from_sql_fast(self, obj, all_objects): + if ('action', obj.db_action) in all_objects: + p = all_objects[('action', obj.db_action)] + p.db_add_operation(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'change_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_what') and obj.db_what is not None: + columnMap['what'] = \ + self.convertToDB(obj.db_what, 'str', 
'varchar(255)') + if hasattr(obj, 'db_oldObjId') and obj.db_oldObjId is not None: + columnMap['old_obj_id'] = \ + self.convertToDB(obj.db_oldObjId, 'long', 'int') + if hasattr(obj, 'db_newObjId') and obj.db_newObjId is not None: + columnMap['new_obj_id'] = \ + self.convertToDB(obj.db_newObjId, 'long', 'int') + if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None: + columnMap['par_obj_id'] = \ + self.convertToDB(obj.db_parentObjId, 'long', 'int') + if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None: + columnMap['par_obj_type'] = \ + self.convertToDB(obj.db_parentObjType, 'str', 'char(16)') + if hasattr(obj, 'db_action') and obj.db_action is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'change_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_what') and obj.db_what is not None: + columnMap['what'] = \ + self.convertToDB(obj.db_what, 
'str', 'varchar(255)') + if hasattr(obj, 'db_oldObjId') and obj.db_oldObjId is not None: + columnMap['old_obj_id'] = \ + self.convertToDB(obj.db_oldObjId, 'long', 'int') + if hasattr(obj, 'db_newObjId') and obj.db_newObjId is not None: + columnMap['new_obj_id'] = \ + self.convertToDB(obj.db_newObjId, 'long', 'int') + if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None: + columnMap['par_obj_id'] = \ + self.convertToDB(obj.db_parentObjId, 'long', 'int') + if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None: + columnMap['par_obj_type'] = \ + self.convertToDB(obj.db_parentObjType, 'str', 'char(16)') + if hasattr(obj, 'db_action') and obj.db_action is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + if obj.db_data is not None: + child = obj.db_data + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'change_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBGroupExecSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'group_exec' + + def getDao(self, 
dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'group_exec' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + cached = self.convertFromDB(row[3], 'int', 'int') + module_id = self.convertFromDB(row[4], 'long', 'int') + group_name = self.convertFromDB(row[5], 'str', 'varchar(255)') + group_type = self.convertFromDB(row[6], 'str', 'varchar(255)') + completed = self.convertFromDB(row[7], 'int', 'int') + error = self.convertFromDB(row[8], 'str', 'varchar(1023)') + machine_id = self.convertFromDB(row[9], 'long', 'int') + parentType = self.convertFromDB(row[10], 'str', 'char(32)') + entity_id = self.convertFromDB(row[11], 'long', 'int') + entity_type = self.convertFromDB(row[12], 'str', 'char(16)') + parent = self.convertFromDB(row[13], 'long', 'long') + + group_exec = DBGroupExec(ts_start=ts_start, + ts_end=ts_end, + cached=cached, + module_id=module_id, + group_name=group_name, + group_type=group_type, + completed=completed, + error=error, + machine_id=machine_id, + id=id) + group_exec.db_parentType = parentType + group_exec.db_entity_id = entity_id + group_exec.db_entity_type = entity_type + group_exec.db_parent = parent + group_exec.is_dirty = False + res[('group_exec', id)] = group_exec + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 
'entity_type', 'parent_id'] + table = 'group_exec' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + cached = self.convertFromDB(row[3], 'int', 'int') + module_id = self.convertFromDB(row[4], 'long', 'int') + group_name = self.convertFromDB(row[5], 'str', 'varchar(255)') + group_type = self.convertFromDB(row[6], 'str', 'varchar(255)') + completed = self.convertFromDB(row[7], 'int', 'int') + error = self.convertFromDB(row[8], 'str', 'varchar(1023)') + machine_id = self.convertFromDB(row[9], 'long', 'int') + parentType = self.convertFromDB(row[10], 'str', 'char(32)') + entity_id = self.convertFromDB(row[11], 'long', 'int') + entity_type = self.convertFromDB(row[12], 'str', 'char(16)') + parent = self.convertFromDB(row[13], 'long', 'long') + + group_exec = DBGroupExec(ts_start=ts_start, + ts_end=ts_end, + cached=cached, + module_id=module_id, + group_name=group_name, + group_type=group_type, + completed=completed, + error=error, + machine_id=machine_id, + id=id) + group_exec.db_parentType = parentType + group_exec.db_entity_id = entity_id + group_exec.db_entity_type = entity_type + group_exec.db_parent = parent + group_exec.is_dirty = False + res[('group_exec', id)] = group_exec + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'workflow_exec': + p = all_objects[('workflow_exec', obj.db_parent)] + p.db_add_item_exec(obj) + elif obj.db_parentType == 'loop_iteration': + p = all_objects[('loop_iteration', obj.db_parent)] + p.db_add_item_exec(obj) + elif obj.db_parentType == 'group_exec': + p = all_objects[('group_exec', obj.db_parent)] + p.db_add_item_exec(obj) + + def set_sql_columns(self, db, obj, global_props, 
do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'group_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_cached') and obj.db_cached is not None: + columnMap['cached'] = \ + self.convertToDB(obj.db_cached, 'int', 'int') + if hasattr(obj, 'db_module_id') and obj.db_module_id is not None: + columnMap['module_id'] = \ + self.convertToDB(obj.db_module_id, 'long', 'int') + if hasattr(obj, 'db_group_name') and obj.db_group_name is not None: + columnMap['group_name'] = \ + self.convertToDB(obj.db_group_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_group_type') and obj.db_group_type is not None: + columnMap['group_type'] = \ + self.convertToDB(obj.db_group_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_error') and obj.db_error is not None: + columnMap['error'] = \ + self.convertToDB(obj.db_error, 'str', 'varchar(1023)') + if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None: + columnMap['machine_id'] = \ + self.convertToDB(obj.db_machine_id, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + 
columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'group_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_cached') and obj.db_cached is not None: + columnMap['cached'] = \ + self.convertToDB(obj.db_cached, 'int', 'int') + if hasattr(obj, 'db_module_id') and obj.db_module_id is not None: + columnMap['module_id'] = \ + self.convertToDB(obj.db_module_id, 'long', 'int') + if hasattr(obj, 
'db_group_name') and obj.db_group_name is not None: + columnMap['group_name'] = \ + self.convertToDB(obj.db_group_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_group_type') and obj.db_group_type is not None: + columnMap['group_type'] = \ + self.convertToDB(obj.db_group_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_error') and obj.db_error is not None: + columnMap['error'] = \ + self.convertToDB(obj.db_error, 'str', 'varchar(1023)') + if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None: + columnMap['machine_id'] = \ + self.convertToDB(obj.db_machine_id, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_item_execs: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'group_exec' + whereMap = {} + whereMap.update(global_props) 
+ if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBPackageSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'package' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type'] + table = 'package' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + identifier = self.convertFromDB(row[2], 'str', 'varchar(1023)') + codepath = self.convertFromDB(row[3], 'str', 'varchar(1023)') + load_configuration = self.convertFromDB(row[4], 'int', 'int') + version = self.convertFromDB(row[5], 'str', 'varchar(255)') + description = self.convertFromDB(row[6], 'str', 'varchar(1023)') + registry = self.convertFromDB(row[7], 'long', 'int') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + + package = DBPackage(name=name, + identifier=identifier, + codepath=codepath, + load_configuration=load_configuration, + version=version, + description=description, + id=id) + package.db_registry = registry + package.db_entity_id = entity_id + package.db_entity_type = entity_type + package.is_dirty = False + res[('package', id)] = package + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type'] + table = 'package' + whereMap = 
global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + identifier = self.convertFromDB(row[2], 'str', 'varchar(1023)') + codepath = self.convertFromDB(row[3], 'str', 'varchar(1023)') + load_configuration = self.convertFromDB(row[4], 'int', 'int') + version = self.convertFromDB(row[5], 'str', 'varchar(255)') + description = self.convertFromDB(row[6], 'str', 'varchar(1023)') + registry = self.convertFromDB(row[7], 'long', 'int') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + + package = DBPackage(name=name, + identifier=identifier, + codepath=codepath, + load_configuration=load_configuration, + version=version, + description=description, + id=id) + package.db_registry = registry + package.db_entity_id = entity_id + package.db_entity_type = entity_type + package.is_dirty = False + res[('package', id)] = package + return res + + def from_sql_fast(self, obj, all_objects): + if ('registry', obj.db_registry) in all_objects: + p = all_objects[('registry', obj.db_registry)] + p.db_add_package(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type'] + table = 'package' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + 
self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_identifier') and obj.db_identifier is not None: + columnMap['identifier'] = \ + self.convertToDB(obj.db_identifier, 'str', 'varchar(1023)') + if hasattr(obj, 'db_codepath') and obj.db_codepath is not None: + columnMap['codepath'] = \ + self.convertToDB(obj.db_codepath, 'str', 'varchar(1023)') + if hasattr(obj, 'db_load_configuration') and obj.db_load_configuration is not None: + columnMap['load_configuration'] = \ + self.convertToDB(obj.db_load_configuration, 'int', 'int') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_description') and obj.db_description is not None: + columnMap['description'] = \ + self.convertToDB(obj.db_description, 'str', 'varchar(1023)') + if hasattr(obj, 'db_registry') and obj.db_registry is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_registry, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type'] + table = 'package' + whereMap = {} + whereMap.update(global_props) + if 
obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_identifier') and obj.db_identifier is not None: + columnMap['identifier'] = \ + self.convertToDB(obj.db_identifier, 'str', 'varchar(1023)') + if hasattr(obj, 'db_codepath') and obj.db_codepath is not None: + columnMap['codepath'] = \ + self.convertToDB(obj.db_codepath, 'str', 'varchar(1023)') + if hasattr(obj, 'db_load_configuration') and obj.db_load_configuration is not None: + columnMap['load_configuration'] = \ + self.convertToDB(obj.db_load_configuration, 'int', 'int') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_description') and obj.db_description is not None: + columnMap['description'] = \ + self.convertToDB(obj.db_description, 'str', 'varchar(1023)') + if hasattr(obj, 'db_registry') and obj.db_registry is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_registry, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 
'long', 'int') + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_module_descriptors: + child.db_package = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'package' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBWorkflowExecSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'workflow_exec' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type'] + table = 'workflow_exec' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + user = self.convertFromDB(row[1], 'str', 'varchar(255)') + ip = self.convertFromDB(row[2], 'str', 'varchar(255)') + session = self.convertFromDB(row[3], 'long', 'int') + vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)') + ts_start = self.convertFromDB(row[5], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[6], 'datetime', 'datetime') + parent_id = self.convertFromDB(row[7], 'long', 'int') + parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)') + parent_version = self.convertFromDB(row[9], 'long', 'int') + completed = self.convertFromDB(row[10], 'int', 'int') + name = self.convertFromDB(row[11], 'str', 'varchar(255)') + log = self.convertFromDB(row[12], 'long', 'int') + entity_id = self.convertFromDB(row[13], 'long', 'int') + entity_type = self.convertFromDB(row[14], 'str', 'char(16)') 
+ + workflow_exec = DBWorkflowExec(user=user, + ip=ip, + session=session, + vt_version=vt_version, + ts_start=ts_start, + ts_end=ts_end, + parent_id=parent_id, + parent_type=parent_type, + parent_version=parent_version, + completed=completed, + name=name, + id=id) + workflow_exec.db_log = log + workflow_exec.db_entity_id = entity_id + workflow_exec.db_entity_type = entity_type + workflow_exec.is_dirty = False + res[('workflow_exec', id)] = workflow_exec + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type'] + table = 'workflow_exec' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + user = self.convertFromDB(row[1], 'str', 'varchar(255)') + ip = self.convertFromDB(row[2], 'str', 'varchar(255)') + session = self.convertFromDB(row[3], 'long', 'int') + vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)') + ts_start = self.convertFromDB(row[5], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[6], 'datetime', 'datetime') + parent_id = self.convertFromDB(row[7], 'long', 'int') + parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)') + parent_version = self.convertFromDB(row[9], 'long', 'int') + completed = self.convertFromDB(row[10], 'int', 'int') + name = self.convertFromDB(row[11], 'str', 'varchar(255)') + log = self.convertFromDB(row[12], 'long', 'int') + entity_id = self.convertFromDB(row[13], 'long', 'int') + entity_type = self.convertFromDB(row[14], 'str', 'char(16)') + + workflow_exec = DBWorkflowExec(user=user, + ip=ip, + session=session, + vt_version=vt_version, + ts_start=ts_start, + ts_end=ts_end, + parent_id=parent_id, + 
parent_type=parent_type, + parent_version=parent_version, + completed=completed, + name=name, + id=id) + workflow_exec.db_log = log + workflow_exec.db_entity_id = entity_id + workflow_exec.db_entity_type = entity_type + workflow_exec.is_dirty = False + res[('workflow_exec', id)] = workflow_exec + return res + + def from_sql_fast(self, obj, all_objects): + if ('log', obj.db_log) in all_objects: + p = all_objects[('log', obj.db_log)] + p.db_add_workflow_exec(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type'] + table = 'workflow_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_ip') and obj.db_ip is not None: + columnMap['ip'] = \ + self.convertToDB(obj.db_ip, 'str', 'varchar(255)') + if hasattr(obj, 'db_session') and obj.db_session is not None: + columnMap['session'] = \ + self.convertToDB(obj.db_session, 'long', 'int') + if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None: + columnMap['vt_version'] = \ + self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_parent_id') and 
obj.db_parent_id is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent_id, 'long', 'int') + if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None: + columnMap['parent_version'] = \ + self.convertToDB(obj.db_parent_version, 'long', 'int') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_log') and obj.db_log is not None: + columnMap['log_id'] = \ + self.convertToDB(obj.db_log, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type'] + table = 'workflow_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + 
self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_ip') and obj.db_ip is not None: + columnMap['ip'] = \ + self.convertToDB(obj.db_ip, 'str', 'varchar(255)') + if hasattr(obj, 'db_session') and obj.db_session is not None: + columnMap['session'] = \ + self.convertToDB(obj.db_session, 'long', 'int') + if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None: + columnMap['vt_version'] = \ + self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_parent_id') and obj.db_parent_id is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent_id, 'long', 'int') + if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)') + if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None: + columnMap['parent_version'] = \ + self.convertToDB(obj.db_parent_version, 'long', 'int') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_log') and obj.db_log is not None: + columnMap['log_id'] = \ + self.convertToDB(obj.db_log, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and 
obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_machines: + child.db_workflow_exec = obj.db_id + for child in obj.db_item_execs: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'workflow_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBParameterExplorationSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'parameter_exploration' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type'] + table = 'parameter_exploration' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + action_id = self.convertFromDB(row[1], 'long', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + date = self.convertFromDB(row[3], 'datetime', 'datetime') + user = self.convertFromDB(row[4], 'str', 'varchar(255)') + dims = self.convertFromDB(row[5], 'str', 'varchar(255)') + layout = 
self.convertFromDB(row[6], 'str', 'varchar(255)') + vistrail = self.convertFromDB(row[7], 'long', 'int') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + + parameter_exploration = DBParameterExploration(action_id=action_id, + name=name, + date=date, + user=user, + dims=dims, + layout=layout, + id=id) + parameter_exploration.db_vistrail = vistrail + parameter_exploration.db_entity_id = entity_id + parameter_exploration.db_entity_type = entity_type + parameter_exploration.is_dirty = False + res[('parameter_exploration', id)] = parameter_exploration + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type'] + table = 'parameter_exploration' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + action_id = self.convertFromDB(row[1], 'long', 'int') + name = self.convertFromDB(row[2], 'str', 'varchar(255)') + date = self.convertFromDB(row[3], 'datetime', 'datetime') + user = self.convertFromDB(row[4], 'str', 'varchar(255)') + dims = self.convertFromDB(row[5], 'str', 'varchar(255)') + layout = self.convertFromDB(row[6], 'str', 'varchar(255)') + vistrail = self.convertFromDB(row[7], 'long', 'int') + entity_id = self.convertFromDB(row[8], 'long', 'int') + entity_type = self.convertFromDB(row[9], 'str', 'char(16)') + + parameter_exploration = DBParameterExploration(action_id=action_id, + name=name, + date=date, + user=user, + dims=dims, + layout=layout, + id=id) + parameter_exploration.db_vistrail = vistrail + parameter_exploration.db_entity_id = entity_id + parameter_exploration.db_entity_type = entity_type + parameter_exploration.is_dirty = False + res[('parameter_exploration', id)] = 
parameter_exploration + return res + + def from_sql_fast(self, obj, all_objects): + if ('vistrail', obj.db_vistrail) in all_objects: + p = all_objects[('vistrail', obj.db_vistrail)] + p.db_add_parameter_exploration(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type'] + table = 'parameter_exploration' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_action_id') and obj.db_action_id is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_dims') and obj.db_dims is not None: + columnMap['dims'] = \ + self.convertToDB(obj.db_dims, 'str', 'varchar(255)') + if hasattr(obj, 'db_layout') and obj.db_layout is not None: + columnMap['layout'] = \ + self.convertToDB(obj.db_layout, 'str', 'varchar(255)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + 
columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type'] + table = 'parameter_exploration' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_action_id') and obj.db_action_id is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_dims') and obj.db_dims is not None: + columnMap['dims'] = \ + self.convertToDB(obj.db_dims, 'str', 'varchar(255)') + if hasattr(obj, 'db_layout') and obj.db_layout is not None: + columnMap['layout'] = \ + self.convertToDB(obj.db_layout, 'str', 'varchar(255)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + 
self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_functions: + child.db_parameter_exploration = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'parameter_exploration' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBLoopExecSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'loop_exec' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'loop_exec' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + loop_exec = DBLoopExec(ts_start=ts_start, + ts_end=ts_end, + id=id) + 
loop_exec.db_parentType = parentType + loop_exec.db_entity_id = entity_id + loop_exec.db_entity_type = entity_type + loop_exec.db_parent = parent + loop_exec.is_dirty = False + res[('loop_exec', id)] = loop_exec + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'loop_exec' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + loop_exec = DBLoopExec(ts_start=ts_start, + ts_end=ts_end, + id=id) + loop_exec.db_parentType = parentType + loop_exec.db_entity_id = entity_id + loop_exec.db_entity_type = entity_type + loop_exec.db_parent = parent + loop_exec.is_dirty = False + res[('loop_exec', id)] = loop_exec + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'workflow_exec': + p = all_objects[('workflow_exec', obj.db_parent)] + p.db_add_item_exec(obj) + elif obj.db_parentType == 'group_exec': + p = all_objects[('group_exec', obj.db_parent)] + p.db_add_item_exec(obj) + elif obj.db_parentType == 'module_exec': + p = all_objects[('module_exec', obj.db_parent)] + p.db_add_loop_exec(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'loop_exec' + whereMap = {} + whereMap.update(global_props) + 
if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'loop_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + 
columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_loop_iterations: + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'loop_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBControlParameterSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'control_parameter' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'control_parameter' + whereMap = global_props + orderBy = 'id' + + dbCommand = 
self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'mediumtext') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + controlParameter = DBControlParameter(name=name, + value=value, + id=id) + controlParameter.db_parentType = parentType + controlParameter.db_entity_id = entity_id + controlParameter.db_entity_type = entity_type + controlParameter.db_parent = parent + controlParameter.is_dirty = False + res[('controlParameter', id)] = controlParameter + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'control_parameter' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + name = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'mediumtext') + parentType = self.convertFromDB(row[3], 'str', 'char(32)') + entity_id = self.convertFromDB(row[4], 'long', 'int') + entity_type = self.convertFromDB(row[5], 'str', 'char(16)') + parent = self.convertFromDB(row[6], 'long', 'long') + + controlParameter = DBControlParameter(name=name, + value=value, + id=id) + controlParameter.db_parentType = parentType + controlParameter.db_entity_id = entity_id + controlParameter.db_entity_type = entity_type + controlParameter.db_parent = parent + controlParameter.is_dirty = False + res[('controlParameter', id)] = 
controlParameter + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'vistrail': + p = all_objects[('vistrail', obj.db_parent)] + p.db_add_controlParameter(obj) + elif obj.db_parentType == 'module': + p = all_objects[('module', obj.db_parent)] + p.db_add_controlParameter(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'abstraction': + p = all_objects[('abstraction', obj.db_parent)] + p.db_add_controlParameter(obj) + elif obj.db_parentType == 'group': + p = all_objects[('group', obj.db_parent)] + p.db_add_controlParameter(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'control_parameter' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'mediumtext') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + 
self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'control_parameter' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'mediumtext') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = 
self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'control_parameter' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBMashupActionAnnotationSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'mashup_action_annotation' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action_annotation' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + key = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'varchar(8191)') + action_id = self.convertFromDB(row[3], 'long', 'int') + date = self.convertFromDB(row[4], 'datetime', 'datetime') + user = self.convertFromDB(row[5], 'str', 'varchar(255)') + mashuptrail = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + mashup_actionAnnotation = DBMashupActionAnnotation(key=key, + value=value, + action_id=action_id, + date=date, + user=user, + id=id) + mashup_actionAnnotation.db_mashuptrail = mashuptrail + mashup_actionAnnotation.db_entity_id = entity_id + mashup_actionAnnotation.db_entity_type = entity_type + mashup_actionAnnotation.is_dirty = False + 
res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action_annotation' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + key = self.convertFromDB(row[1], 'str', 'varchar(255)') + value = self.convertFromDB(row[2], 'str', 'varchar(8191)') + action_id = self.convertFromDB(row[3], 'long', 'int') + date = self.convertFromDB(row[4], 'datetime', 'datetime') + user = self.convertFromDB(row[5], 'str', 'varchar(255)') + mashuptrail = self.convertFromDB(row[6], 'long', 'int') + entity_id = self.convertFromDB(row[7], 'long', 'int') + entity_type = self.convertFromDB(row[8], 'str', 'char(16)') + + mashup_actionAnnotation = DBMashupActionAnnotation(key=key, + value=value, + action_id=action_id, + date=date, + user=user, + id=id) + mashup_actionAnnotation.db_mashuptrail = mashuptrail + mashup_actionAnnotation.db_entity_id = entity_id + mashup_actionAnnotation.db_entity_type = entity_type + mashup_actionAnnotation.is_dirty = False + res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation + return res + + def from_sql_fast(self, obj, all_objects): + if ('mashuptrail', obj.db_mashuptrail) in all_objects: + p = all_objects[('mashuptrail', obj.db_mashuptrail)] + p.db_add_actionAnnotation(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action_annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + 
whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_key') and obj.db_key is not None: + columnMap['akey'] = \ + self.convertToDB(obj.db_key, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'varchar(8191)') + if hasattr(obj, 'db_action_id') and obj.db_action_id is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action_id, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_mashuptrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'mashup_action_annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} 
+ if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_key') and obj.db_key is not None: + columnMap['akey'] = \ + self.convertToDB(obj.db_key, 'str', 'varchar(255)') + if hasattr(obj, 'db_value') and obj.db_value is not None: + columnMap['value'] = \ + self.convertToDB(obj.db_value, 'str', 'varchar(8191)') + if hasattr(obj, 'db_action_id') and obj.db_action_id is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action_id, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_mashuptrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'mashup_action_annotation' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBConnectionSQLDAOBase(SQLDAO): + + def 
__init__(self, daoList): + self.daoList = daoList + self.table = 'connection_tbl' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'connection_tbl' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + parentType = self.convertFromDB(row[1], 'str', 'char(32)') + entity_id = self.convertFromDB(row[2], 'long', 'int') + entity_type = self.convertFromDB(row[3], 'str', 'char(16)') + parent = self.convertFromDB(row[4], 'long', 'long') + + connection = DBConnection(id=id) + connection.db_parentType = parentType + connection.db_entity_id = entity_id + connection.db_entity_type = entity_type + connection.db_parent = parent + connection.is_dirty = False + res[('connection', id)] = connection + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'connection_tbl' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + parentType = self.convertFromDB(row[1], 'str', 'char(32)') + entity_id = self.convertFromDB(row[2], 'long', 'int') + entity_type = self.convertFromDB(row[3], 'str', 'char(16)') + parent = self.convertFromDB(row[4], 'long', 'long') + + connection = DBConnection(id=id) + connection.db_parentType = parentType + connection.db_entity_id = entity_id + connection.db_entity_type = entity_type + connection.db_parent = parent + connection.is_dirty = False + res[('connection', id)] = connection + return res + + def from_sql_fast(self, obj, 
all_objects): + if obj.db_parentType == 'workflow': + p = all_objects[('workflow', obj.db_parent)] + p.db_add_connection(obj) + elif obj.db_parentType == 'add': + p = all_objects[('add', obj.db_parent)] + p.db_add_data(obj) + elif obj.db_parentType == 'change': + p = all_objects[('change', obj.db_parent)] + p.db_add_data(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'connection_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'connection_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = 
self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_ports: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'connection_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBPEFunctionSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'pe_function' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'pe_function' + whereMap = global_props + orderBy = 'id' + + dbCommand = 
self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + module_id = self.convertFromDB(row[1], 'long', 'int') + port_name = self.convertFromDB(row[2], 'str', 'varchar(255)') + is_alias = self.convertFromDB(row[3], 'long', 'int') + parentType = self.convertFromDB(row[4], 'str', 'char(32)') + parameter_exploration = self.convertFromDB(row[5], 'long', 'int') + entity_id = self.convertFromDB(row[6], 'long', 'int') + entity_type = self.convertFromDB(row[7], 'str', 'char(16)') + + pe_function = DBPEFunction(module_id=module_id, + port_name=port_name, + id=id) + pe_function.db_parentType = parentType + pe_function.db_parameter_exploration = parameter_exploration + pe_function.db_entity_id = entity_id + pe_function.db_entity_type = entity_type + pe_function.is_dirty = False + res[('pe_function', id)] = pe_function + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'pe_function' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + module_id = self.convertFromDB(row[1], 'long', 'int') + port_name = self.convertFromDB(row[2], 'str', 'varchar(255)') + is_alias = self.convertFromDB(row[3], 'long', 'int') + parentType = self.convertFromDB(row[4], 'str', 'char(32)') + parameter_exploration = self.convertFromDB(row[5], 'long', 'int') + entity_id = self.convertFromDB(row[6], 'long', 'int') + entity_type = self.convertFromDB(row[7], 'str', 'char(16)') + + pe_function = DBPEFunction(module_id=module_id, + port_name=port_name, + id=id) + pe_function.db_parentType = parentType + pe_function.db_parameter_exploration = 
parameter_exploration + pe_function.db_entity_id = entity_id + pe_function.db_entity_type = entity_type + pe_function.is_dirty = False + res[('pe_function', id)] = pe_function + return res + + def from_sql_fast(self, obj, all_objects): + if ('parameter_exploration', obj.db_parameter_exploration) in all_objects: + p = all_objects[('parameter_exploration', obj.db_parameter_exploration)] + p.db_add_function(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'pe_function' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_module_id') and obj.db_module_id is not None: + columnMap['module_id'] = \ + self.convertToDB(obj.db_module_id, 'long', 'int') + if hasattr(obj, 'db_port_name') and obj.db_port_name is not None: + columnMap['port_name'] = \ + self.convertToDB(obj.db_port_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None: + columnMap['is_alias'] = \ + self.convertToDB(obj.db_is_alias, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parameter_exploration, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + 
columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type'] + table = 'pe_function' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_module_id') and obj.db_module_id is not None: + columnMap['module_id'] = \ + self.convertToDB(obj.db_module_id, 'long', 'int') + if hasattr(obj, 'db_port_name') and obj.db_port_name is not None: + columnMap['port_name'] = \ + self.convertToDB(obj.db_port_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None: + columnMap['is_alias'] = \ + self.convertToDB(obj.db_is_alias, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parameter_exploration, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + 
columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_parameters: + child.db_pe_function = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'pe_function' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBActionSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'action' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'action' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + prevId = self.convertFromDB(row[1], 'long', 'int') + date = self.convertFromDB(row[2], 'datetime', 'datetime') + session = self.convertFromDB(row[3], 'long', 'int') + user = self.convertFromDB(row[4], 'str', 'varchar(255)') + vistrail = self.convertFromDB(row[5], 'long', 'int') + entity_id = self.convertFromDB(row[6], 'long', 'int') + entity_type = self.convertFromDB(row[7], 'str', 'char(16)') + + action = DBAction(prevId=prevId, + date=date, + session=session, + user=user, + id=id) + action.db_vistrail = vistrail + action.db_entity_id = entity_id + action.db_entity_type = entity_type + action.is_dirty = False + res[('action', id)] = action + return res + + def 
get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'action' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + prevId = self.convertFromDB(row[1], 'long', 'int') + date = self.convertFromDB(row[2], 'datetime', 'datetime') + session = self.convertFromDB(row[3], 'long', 'int') + user = self.convertFromDB(row[4], 'str', 'varchar(255)') + vistrail = self.convertFromDB(row[5], 'long', 'int') + entity_id = self.convertFromDB(row[6], 'long', 'int') + entity_type = self.convertFromDB(row[7], 'str', 'char(16)') + + action = DBAction(prevId=prevId, + date=date, + session=session, + user=user, + id=id) + action.db_vistrail = vistrail + action.db_entity_id = entity_id + action.db_entity_type = entity_type + action.is_dirty = False + res[('action', id)] = action + return res + + def from_sql_fast(self, obj, all_objects): + if ('vistrail', obj.db_vistrail) in all_objects: + p = all_objects[('vistrail', obj.db_vistrail)] + p.db_add_action(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'action' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_prevId') and obj.db_prevId is not None: + columnMap['prev_id'] = \ + self.convertToDB(obj.db_prevId, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + 
self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_session') and obj.db_session is not None: + columnMap['session'] = \ + self.convertToDB(obj.db_session, 'long', 'int') + if hasattr(obj, 'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type'] + table = 'action' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_prevId') and obj.db_prevId is not None: + columnMap['prev_id'] = \ + self.convertToDB(obj.db_prevId, 'long', 'int') + if hasattr(obj, 'db_date') and obj.db_date is not None: + columnMap['date'] = \ + self.convertToDB(obj.db_date, 'datetime', 'datetime') + if hasattr(obj, 'db_session') and obj.db_session is not None: + columnMap['session'] = \ + self.convertToDB(obj.db_session, 'long', 'int') + if hasattr(obj, 
'db_user') and obj.db_user is not None: + columnMap['user'] = \ + self.convertToDB(obj.db_user, 'str', 'varchar(255)') + if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_vistrail, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_operations: + child.db_action = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'action' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBDeleteSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'delete_tbl' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'delete_tbl' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + 
what = self.convertFromDB(row[1], 'str', 'varchar(255)') + objectId = self.convertFromDB(row[2], 'long', 'int') + parentObjId = self.convertFromDB(row[3], 'long', 'int') + parentObjType = self.convertFromDB(row[4], 'str', 'char(16)') + action = self.convertFromDB(row[5], 'long', 'int') + entity_id = self.convertFromDB(row[6], 'long', 'int') + entity_type = self.convertFromDB(row[7], 'str', 'char(16)') + + delete = DBDelete(what=what, + objectId=objectId, + parentObjId=parentObjId, + parentObjType=parentObjType, + id=id) + delete.db_action = action + delete.db_entity_id = entity_id + delete.db_entity_type = entity_type + delete.is_dirty = False + res[('delete', id)] = delete + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'delete_tbl' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + what = self.convertFromDB(row[1], 'str', 'varchar(255)') + objectId = self.convertFromDB(row[2], 'long', 'int') + parentObjId = self.convertFromDB(row[3], 'long', 'int') + parentObjType = self.convertFromDB(row[4], 'str', 'char(16)') + action = self.convertFromDB(row[5], 'long', 'int') + entity_id = self.convertFromDB(row[6], 'long', 'int') + entity_type = self.convertFromDB(row[7], 'str', 'char(16)') + + delete = DBDelete(what=what, + objectId=objectId, + parentObjId=parentObjId, + parentObjType=parentObjType, + id=id) + delete.db_action = action + delete.db_entity_id = entity_id + delete.db_entity_type = entity_type + delete.is_dirty = False + res[('delete', id)] = delete + return res + + def from_sql_fast(self, obj, all_objects): + if ('action', obj.db_action) in all_objects: + p = all_objects[('action', obj.db_action)] + p.db_add_operation(obj) 
+ + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'delete_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_what') and obj.db_what is not None: + columnMap['what'] = \ + self.convertToDB(obj.db_what, 'str', 'varchar(255)') + if hasattr(obj, 'db_objectId') and obj.db_objectId is not None: + columnMap['object_id'] = \ + self.convertToDB(obj.db_objectId, 'long', 'int') + if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None: + columnMap['par_obj_id'] = \ + self.convertToDB(obj.db_parentObjId, 'long', 'int') + if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None: + columnMap['par_obj_type'] = \ + self.convertToDB(obj.db_parentObjType, 'str', 'char(16)') + if hasattr(obj, 'db_action') and obj.db_action is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'what', 'object_id', 
'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type'] + table = 'delete_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_what') and obj.db_what is not None: + columnMap['what'] = \ + self.convertToDB(obj.db_what, 'str', 'varchar(255)') + if hasattr(obj, 'db_objectId') and obj.db_objectId is not None: + columnMap['object_id'] = \ + self.convertToDB(obj.db_objectId, 'long', 'int') + if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None: + columnMap['par_obj_id'] = \ + self.convertToDB(obj.db_parentObjId, 'long', 'int') + if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None: + columnMap['par_obj_type'] = \ + self.convertToDB(obj.db_parentObjType, 'str', 'char(16)') + if hasattr(obj, 'db_action') and obj.db_action is not None: + columnMap['action_id'] = \ + self.convertToDB(obj.db_action, 'long', 'int') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + pass + + def delete_sql_column(self, db, obj, global_props): + table = 'delete_tbl' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = 
keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBVistrailSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'vistrail' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'entity_type', 'version', 'name', 'last_modified'] + table = 'vistrail' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + entity_type = self.convertFromDB(row[1], 'str', 'char(16)') + global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)') + version = self.convertFromDB(row[2], 'str', 'char(16)') + name = self.convertFromDB(row[3], 'str', 'varchar(255)') + last_modified = self.convertFromDB(row[4], 'datetime', 'datetime') + + vistrail = DBVistrail(entity_type=entity_type, + version=version, + name=name, + last_modified=last_modified, + id=id) + vistrail.is_dirty = False + res[('vistrail', id)] = vistrail + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'entity_type', 'version', 'name', 'last_modified'] + table = 'vistrail' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + global_props['entity_id'] = self.convertToDB(id, 'long', 'int') + entity_type = self.convertFromDB(row[1], 'str', 'char(16)') + global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)') + version = self.convertFromDB(row[2], 'str', 'char(16)') + name = self.convertFromDB(row[3], 'str', 'varchar(255)') + 
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime') + + vistrail = DBVistrail(entity_type=entity_type, + version=version, + name=name, + last_modified=last_modified, + id=id) + vistrail.is_dirty = False + res[('vistrail', id)] = vistrail + return res + + def from_sql_fast(self, obj, all_objects): + pass + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'entity_type', 'version', 'name', 'last_modified'] + table = 'vistrail' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + 
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'entity_type', 'version', 'name', 'last_modified'] + table = 'vistrail' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_version') and obj.db_version is not None: + columnMap['version'] = \ + self.convertToDB(obj.db_version, 'str', 'char(16)') + if hasattr(obj, 'db_name') and obj.db_name is not None: + columnMap['name'] = \ + self.convertToDB(obj.db_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None: + columnMap['last_modified'] = \ + self.convertToDB(obj.db_last_modified, 'datetime', 'datetime') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return dbCommand + + def set_sql_process(self, obj, global_props, lastId): + if obj.db_id is None: + obj.db_id = lastId + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_id') and obj.db_id is not None: + global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int') + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_actions: + child.db_vistrail = obj.db_id + for child in obj.db_tags: + 
child.db_vistrail = obj.db_id + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_controlParameters: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_vistrailVariables: + child.db_vistrail = obj.db_id + for child in obj.db_parameter_explorations: + child.db_vistrail = obj.db_id + for child in obj.db_actionAnnotations: + child.db_vistrail = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'vistrail' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +class DBModuleExecSQLDAOBase(SQLDAO): + + def __init__(self, daoList): + self.daoList = daoList + self.table = 'module_exec' + + def getDao(self, dao): + return self.daoList[dao] + + def get_sql_columns(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module_exec' + whereMap = global_props + orderBy = 'id' + + dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock) + data = self.executeSQL(db, dbCommand, True) + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + cached = self.convertFromDB(row[3], 'int', 'int') + module_id = self.convertFromDB(row[4], 'long', 'int') + module_name = self.convertFromDB(row[5], 'str', 'varchar(255)') + completed = self.convertFromDB(row[6], 'int', 'int') + error = self.convertFromDB(row[7], 'str', 'varchar(1023)') + machine_id = self.convertFromDB(row[8], 'long', 'int') + parentType = self.convertFromDB(row[9], 'str', 
'char(32)') + entity_id = self.convertFromDB(row[10], 'long', 'int') + entity_type = self.convertFromDB(row[11], 'str', 'char(16)') + parent = self.convertFromDB(row[12], 'long', 'long') + + module_exec = DBModuleExec(ts_start=ts_start, + ts_end=ts_end, + cached=cached, + module_id=module_id, + module_name=module_name, + completed=completed, + error=error, + machine_id=machine_id, + id=id) + module_exec.db_parentType = parentType + module_exec.db_entity_id = entity_id + module_exec.db_entity_type = entity_type + module_exec.db_parent = parent + module_exec.is_dirty = False + res[('module_exec', id)] = module_exec + return res + + def get_sql_select(self, db, global_props,lock=False): + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module_exec' + whereMap = global_props + orderBy = 'id' + return self.createSQLSelect(table, columns, whereMap, orderBy, lock) + + def process_sql_columns(self, data, global_props): + res = {} + for row in data: + id = self.convertFromDB(row[0], 'long', 'int') + ts_start = self.convertFromDB(row[1], 'datetime', 'datetime') + ts_end = self.convertFromDB(row[2], 'datetime', 'datetime') + cached = self.convertFromDB(row[3], 'int', 'int') + module_id = self.convertFromDB(row[4], 'long', 'int') + module_name = self.convertFromDB(row[5], 'str', 'varchar(255)') + completed = self.convertFromDB(row[6], 'int', 'int') + error = self.convertFromDB(row[7], 'str', 'varchar(1023)') + machine_id = self.convertFromDB(row[8], 'long', 'int') + parentType = self.convertFromDB(row[9], 'str', 'char(32)') + entity_id = self.convertFromDB(row[10], 'long', 'int') + entity_type = self.convertFromDB(row[11], 'str', 'char(16)') + parent = self.convertFromDB(row[12], 'long', 'long') + + module_exec = DBModuleExec(ts_start=ts_start, + ts_end=ts_end, + cached=cached, + module_id=module_id, + module_name=module_name, + 
completed=completed, + error=error, + machine_id=machine_id, + id=id) + module_exec.db_parentType = parentType + module_exec.db_entity_id = entity_id + module_exec.db_entity_type = entity_type + module_exec.db_parent = parent + module_exec.is_dirty = False + res[('module_exec', id)] = module_exec + return res + + def from_sql_fast(self, obj, all_objects): + if obj.db_parentType == 'workflow_exec': + p = all_objects[('workflow_exec', obj.db_parent)] + p.db_add_item_exec(obj) + elif obj.db_parentType == 'group_exec': + p = all_objects[('group_exec', obj.db_parent)] + p.db_add_item_exec(obj) + elif obj.db_parentType == 'loop_iteration': + p = all_objects[('loop_iteration', obj.db_parent)] + p.db_add_item_exec(obj) + + def set_sql_columns(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_cached') and obj.db_cached is not None: + columnMap['cached'] = \ + self.convertToDB(obj.db_cached, 'int', 'int') + if hasattr(obj, 'db_module_id') and obj.db_module_id is not None: + columnMap['module_id'] = \ + self.convertToDB(obj.db_module_id, 'long', 'int') + if hasattr(obj, 'db_module_name') and obj.db_module_name is not None: + 
columnMap['module_name'] = \ + self.convertToDB(obj.db_module_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_error') and obj.db_error is not None: + columnMap['error'] = \ + self.convertToDB(obj.db_error, 'str', 'varchar(1023)') + if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None: + columnMap['machine_id'] = \ + self.convertToDB(obj.db_machine_id, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + lastId = self.executeSQL(db, dbCommand, False) + + def set_sql_command(self, db, obj, global_props, do_copy=True): + if not do_copy and not obj.is_dirty: + return None + columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id'] + table = 'module_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + columnMap = {} + if hasattr(obj, 'db_id') and obj.db_id is not None: + columnMap['id'] = \ + self.convertToDB(obj.db_id, 'long', 'int') + if 
hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None: + columnMap['ts_start'] = \ + self.convertToDB(obj.db_ts_start, 'datetime', 'datetime') + if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None: + columnMap['ts_end'] = \ + self.convertToDB(obj.db_ts_end, 'datetime', 'datetime') + if hasattr(obj, 'db_cached') and obj.db_cached is not None: + columnMap['cached'] = \ + self.convertToDB(obj.db_cached, 'int', 'int') + if hasattr(obj, 'db_module_id') and obj.db_module_id is not None: + columnMap['module_id'] = \ + self.convertToDB(obj.db_module_id, 'long', 'int') + if hasattr(obj, 'db_module_name') and obj.db_module_name is not None: + columnMap['module_name'] = \ + self.convertToDB(obj.db_module_name, 'str', 'varchar(255)') + if hasattr(obj, 'db_completed') and obj.db_completed is not None: + columnMap['completed'] = \ + self.convertToDB(obj.db_completed, 'int', 'int') + if hasattr(obj, 'db_error') and obj.db_error is not None: + columnMap['error'] = \ + self.convertToDB(obj.db_error, 'str', 'varchar(1023)') + if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None: + columnMap['machine_id'] = \ + self.convertToDB(obj.db_machine_id, 'long', 'int') + if hasattr(obj, 'db_parentType') and obj.db_parentType is not None: + columnMap['parent_type'] = \ + self.convertToDB(obj.db_parentType, 'str', 'char(32)') + if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None: + columnMap['entity_id'] = \ + self.convertToDB(obj.db_entity_id, 'long', 'int') + if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None: + columnMap['entity_type'] = \ + self.convertToDB(obj.db_entity_type, 'str', 'char(16)') + if hasattr(obj, 'db_parent') and obj.db_parent is not None: + columnMap['parent_id'] = \ + self.convertToDB(obj.db_parent, 'long', 'long') + columnMap.update(global_props) + + if obj.is_new or do_copy: + dbCommand = self.createSQLInsert(table, columnMap) + else: + dbCommand = self.createSQLUpdate(table, columnMap, whereMap) + return 
dbCommand + + def set_sql_process(self, obj, global_props, lastId): + pass + + def to_sql_fast(self, obj, do_copy=True): + for child in obj.db_annotations: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + for child in obj.db_loop_execs: + child.db_parentType = obj.vtType + child.db_parent = obj.db_id + + def delete_sql_column(self, db, obj, global_props): + table = 'module_exec' + whereMap = {} + whereMap.update(global_props) + if obj.db_id is not None: + keyStr = self.convertToDB(obj.db_id, 'long', 'int') + whereMap['id'] = keyStr + dbCommand = self.createSQLDelete(table, whereMap) + self.executeSQL(db, dbCommand, False) + +"""generated automatically by auto_dao.py""" + +class SQLDAOListBase(dict): + + def __init__(self, daos=None): + if daos is not None: + dict.update(self, daos) + + if 'vistrailVariable' not in self: + self['vistrailVariable'] = DBVistrailVariableSQLDAOBase(self) + if 'portSpec' not in self: + self['portSpec'] = DBPortSpecSQLDAOBase(self) + if 'module' not in self: + self['module'] = DBModuleSQLDAOBase(self) + if 'module_descriptor' not in self: + self['module_descriptor'] = DBModuleDescriptorSQLDAOBase(self) + if 'tag' not in self: + self['tag'] = DBTagSQLDAOBase(self) + if 'port' not in self: + self['port'] = DBPortSQLDAOBase(self) + if 'group' not in self: + self['group'] = DBGroupSQLDAOBase(self) + if 'log' not in self: + self['log'] = DBLogSQLDAOBase(self) + if 'loop_iteration' not in self: + self['loop_iteration'] = DBLoopIterationSQLDAOBase(self) + if 'mashup' not in self: + self['mashup'] = DBMashupSQLDAOBase(self) + if 'portSpecItem' not in self: + self['portSpecItem'] = DBPortSpecItemSQLDAOBase(self) + if 'machine' not in self: + self['machine'] = DBMachineSQLDAOBase(self) + if 'add' not in self: + self['add'] = DBAddSQLDAOBase(self) + if 'other' not in self: + self['other'] = DBOtherSQLDAOBase(self) + if 'location' not in self: + self['location'] = DBLocationSQLDAOBase(self) + if 'pe_parameter' not in self: + 
self['pe_parameter'] = DBPEParameterSQLDAOBase(self) + if 'parameter' not in self: + self['parameter'] = DBParameterSQLDAOBase(self) + if 'plugin_data' not in self: + self['plugin_data'] = DBPluginDataSQLDAOBase(self) + if 'function' not in self: + self['function'] = DBFunctionSQLDAOBase(self) + if 'actionAnnotation' not in self: + self['actionAnnotation'] = DBActionAnnotationSQLDAOBase(self) + if 'abstraction' not in self: + self['abstraction'] = DBAbstractionSQLDAOBase(self) + if 'mashup_alias' not in self: + self['mashup_alias'] = DBMashupAliasSQLDAOBase(self) + if 'workflow' not in self: + self['workflow'] = DBWorkflowSQLDAOBase(self) + if 'mashup_action' not in self: + self['mashup_action'] = DBMashupActionSQLDAOBase(self) + if 'mashuptrail' not in self: + self['mashuptrail'] = DBMashuptrailSQLDAOBase(self) + if 'registry' not in self: + self['registry'] = DBRegistrySQLDAOBase(self) + if 'mashup_component' not in self: + self['mashup_component'] = DBMashupComponentSQLDAOBase(self) + if 'annotation' not in self: + self['annotation'] = DBAnnotationSQLDAOBase(self) + if 'change' not in self: + self['change'] = DBChangeSQLDAOBase(self) + if 'group_exec' not in self: + self['group_exec'] = DBGroupExecSQLDAOBase(self) + if 'package' not in self: + self['package'] = DBPackageSQLDAOBase(self) + if 'workflow_exec' not in self: + self['workflow_exec'] = DBWorkflowExecSQLDAOBase(self) + if 'parameter_exploration' not in self: + self['parameter_exploration'] = DBParameterExplorationSQLDAOBase(self) + if 'loop_exec' not in self: + self['loop_exec'] = DBLoopExecSQLDAOBase(self) + if 'controlParameter' not in self: + self['controlParameter'] = DBControlParameterSQLDAOBase(self) + if 'mashup_actionAnnotation' not in self: + self['mashup_actionAnnotation'] = DBMashupActionAnnotationSQLDAOBase(self) + if 'connection' not in self: + self['connection'] = DBConnectionSQLDAOBase(self) + if 'pe_function' not in self: + self['pe_function'] = DBPEFunctionSQLDAOBase(self) + if 'action' 
not in self: + self['action'] = DBActionSQLDAOBase(self) + if 'delete' not in self: + self['delete'] = DBDeleteSQLDAOBase(self) + if 'vistrail' not in self: + self['vistrail'] = DBVistrailSQLDAOBase(self) + if 'module_exec' not in self: + self['module_exec'] = DBModuleExecSQLDAOBase(self) diff --git a/vistrails/db/versions/v1_0_4/persistence/sql/sql_dao.py b/vistrails/db/versions/v1_0_4/persistence/sql/sql_dao.py new file mode 100644 index 000000000..b08a94e89 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/persistence/sql/sql_dao.py @@ -0,0 +1,259 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +## +############################################################################### + +from datetime import date, datetime + +from vistrails.core import debug +from vistrails.core.system import strftime, time_strptime +from vistrails.db import VistrailsDBException +from vistrails.db.services.io import get_db_lib + +class SQLDAO: + def __init__(self): + pass + + def convertFromDB(self, value, type, db_type): + if value is not None: + if type == 'str': + return str(value) + elif type == 'long': + return long(value) + elif type == 'float': + return float(value) + elif type == 'int': + return int(value) + elif type == 'date': + if db_type == 'date': + return value + else: + return date(*time_strptime(str(value), '%Y-%m-%d')[0:3]) + elif type == 'datetime': + if db_type == 'datetime': + return value + else: + return datetime(*time_strptime(str(value), + '%Y-%m-%d %H:%M:%S')[0:6]) + return None + + def convertWarning(self, before, after, _from, to): + text = ["Value truncated when saving to database", + "%s truncated to %s\nwhile converting '%s' to '%s'"] + debug.warning(text[0], text[1] % (before, after, _from, to)) + + def convertToDB(self, value, type, db_type): + if value is not None: + if type == 'str': + value = str(value) + if db_type.startswith('varchar'): + try: + length = int(db_type[8:-1]) + if len(value) > length: + self.convertWarning(value, value[:length], + type, db_type) + value = value[:length] + except Exception, e: + 
pass + if db_type.startswith('char'): + try: + length = int(db_type[5:-1]) + if len(value) > length: + self.convertWarning(value, value[:length], + type, db_type) + value = value[:length] + except Exception, e: + pass + # return "'" + str(value).replace("'", "''") + "'" + return value + elif type == 'long': + return str(value) + elif type == 'float': + # necessary to avoid conversion warnings in MySQL + if db_type.startswith('DECIMAL'): + try: + value="%%.%sf"%str(db_type[8:-1].split(',')[1])%value + except Exception, e: + pass + return str(value) + elif type == 'int': + # note: on 64-bit machines int:s are 64-bit + MIN_INT = -2147483648 + MAX_INT = 2147483647 + if db_type == 'int': + if int(value) < MIN_INT: + self.convertWarning(value, MIN_INT, type, db_type) + value = MIN_INT + if int(value) > MAX_INT: + self.convertWarning(value, MAX_INT, type, db_type) + value = MAX_INT + return str(value) + elif type == 'date': + return value.isoformat() + elif type == 'datetime': + return strftime(value, '%Y-%m-%d %H:%M:%S') + else: + return str(value) + + return None + + def createSQLSelect(self, table, columns, whereMap, orderBy=None, + forUpdate=False): + columnStr = ', '.join(columns) + whereStr = '' + whereClause = '' + values = [] + for column, value in whereMap.iteritems(): + whereStr += '%s%s = %%s' % \ + (whereClause, column) + values.append(value) + whereClause = ' AND ' + dbCommand = """SELECT %s FROM %s WHERE %s""" % \ + (columnStr, table, whereStr) + if orderBy is not None: + dbCommand += " ORDER BY " + orderBy + if forUpdate: + dbCommand += " FOR UPDATE" + dbCommand += ";" + return (dbCommand, tuple(values)) + + def createSQLInsert(self, table, columnMap): + columns = [] + values = [] + for column, value in columnMap.iteritems(): + if value is None: + value = 'NULL' + columns.append(column) + values.append(value) + columnStr = ', '.join(columns) + # valueStr = '%s, '.join(values) + valueStr = '' + if len(values) > 1: + valueStr = '%s,' * (len(values) - 1) + 
'%s' + dbCommand = """INSERT INTO %s(%s) VALUES (%s);""" % \ + (table, columnStr, valueStr) + return (dbCommand, tuple(values)) + + def createSQLUpdate(self, table, columnMap, whereMap): + setStr = '' + comma = '' + values = [] + for column, value in columnMap.iteritems(): +# if value is None: +# value = 'NULL' + setStr += '%s%s = %%s' % (comma, column) + comma = ', ' + values.append(value) + whereStr = '' + whereClause = '' + for column, value in whereMap.iteritems(): + whereStr += '%s%s = %%s' % (whereClause, column) + values.append(value) + whereClause = ' AND ' + dbCommand = """UPDATE %s SET %s WHERE %s;""" % \ + (table, setStr, whereStr) + return (dbCommand, tuple(values)) + + def createSQLDelete(self, table, whereMap): + whereStr = '' + whereClause = '' + values = [] + for column, value in whereMap.iteritems(): + whereStr += '%s %s = %%s' % (whereClause, column) + values.append(value) + whereClause = ' AND ' + dbCommand = """DELETE FROM %s WHERE %s;""" % \ + (table, whereStr) + return (dbCommand, tuple(values)) + + def executeSQL(self, db, cmd_tuple, isFetch): + dbCommand, values = cmd_tuple + # print 'db: %s' % dbCommand + # print 'values:', values + data = None + cursor = db.cursor() + try: + cursor.execute(dbCommand, values) + if isFetch: + data = cursor.fetchall() + else: + data = cursor.lastrowid + except Exception, e: + raise VistrailsDBException('Command "%s" with values "%s" ' + 'failed: %s' % (dbCommand, values, e)) + finally: + cursor.close() + return data + + def executeSQLGroup(self, db, dbCommandList, isFetch): + """ Executes a command consisting of multiple SELECT statements + It returns a list of results from the SELECT statements + """ + data = [] + # break up into bundles + BUNDLE_SIZE = 10000 + num_commands = len(dbCommandList) + n = 0 + while n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + diff --git a/vistrails/db/versions/v1_0_4/schemas/xml/vistrail.xsd b/vistrails/db/versions/v1_0_4/schemas/xml/vistrail.xsd new file mode 100644 index 000000000..7dee723e9 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/schemas/xml/vistrail.xsd @@ -0,0 +1,361 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vistrails/db/versions/v1_0_4/schemas/xml/vtlink.xsd b/vistrails/db/versions/v1_0_4/schemas/xml/vtlink.xsd new file mode 100644 index 000000000..011b59cca --- /dev/null +++ b/vistrails/db/versions/v1_0_4/schemas/xml/vtlink.xsd @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vistrails/db/versions/v1_0_4/schemas/xml/workflow.xsd b/vistrails/db/versions/v1_0_4/schemas/xml/workflow.xsd new file mode 100644 index 000000000..859101cb0 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/schemas/xml/workflow.xsd @@ -0,0 +1,211 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vistrails/db/versions/v1_0_4/specs/all.xml b/vistrails/db/versions/v1_0_4/specs/all.xml 
new file mode 100644 index 000000000..2a395b82f --- /dev/null +++ b/vistrails/db/versions/v1_0_4/specs/all.xml @@ -0,0 +1,3955 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vistrails/db/versions/v1_0_4/translate/__init__.py b/vistrails/db/versions/v1_0_4/translate/__init__.py new file mode 100644 index 000000000..7979ce21d --- /dev/null +++ b/vistrails/db/versions/v1_0_4/translate/__init__.py @@ -0,0 +1,36 @@ +############################################################################### +## +## Copyright 
(C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
+## +############################################################################### + +version = '1.0.4' diff --git a/vistrails/db/versions/v1_0_4/translate/v1_0_3.py b/vistrails/db/versions/v1_0_4/translate/v1_0_3.py new file mode 100644 index 000000000..fdafd8930 --- /dev/null +++ b/vistrails/db/versions/v1_0_4/translate/v1_0_3.py @@ -0,0 +1,136 @@ +############################################################################### +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +## +############################################################################### +from vistrails.db.versions.v1_0_4.domain import DBVistrail, DBVistrailVariable, \ + DBWorkflow, DBLog, DBRegistry, \ + DBAdd, DBChange, DBDelete, \ + DBPortSpec, DBPortSpecItem, \ + DBParameterExploration, \ + DBPEParameter, DBPEFunction, \ + IdScope, DBAbstraction, \ + DBModule, DBGroup, DBAnnotation, \ + DBActionAnnotation + +from vistrails.db.services.vistrail import materializeWorkflow +from xml.dom.minidom import parseString +from itertools import izip + +from ast import literal_eval + +id_scope = None + +def translateVistrail(_vistrail): + """ Translate old annotation based vistrail variables to new + DBVistrailVariable class """ + global id_scope + + def update_workflow(old_obj, trans_dict): + return DBWorkflow.update_version(old_obj.db_workflow, + trans_dict, DBWorkflow()) + + def update_vistrail_variable(old_obj, trans_dict): + return DBVistrailVariable.update_version(old_obj.db_vistrailVariables, + trans_dict, DBVistrailVariable()) + + def update_operations(old_obj, trans_dict): + new_ops = [] + for obj in old_obj.db_operations: + if obj.vtType == 'delete': + new_ops.append(DBDelete.update_version(obj, trans_dict)) + elif obj.vtType == 'add': + new_op = DBAdd.update_version(obj, trans_dict) + new_ops.append(new_op) + elif obj.vtType == 'change': + new_op = DBChange.update_version(obj, trans_dict) + new_ops.append(new_op) + return new_ops 
+ + def update_annotations(old_obj, trans_dict): + new_annotations = [] + for a in old_obj.db_annotations: + new_a = DBAnnotation.update_version(a, trans_dict) + new_annotations.append(new_a) + return new_annotations + + def update_actionAnnotations(old_obj, trans_dict): + new_actionAnnotations = [] + for aa in old_obj.db_actionAnnotations: + new_aa = DBActionAnnotation.update_version(aa, trans_dict) + new_actionAnnotations.append(new_aa) + return new_actionAnnotations + + translate_dict = {'DBWorkflow': {'operations': update_workflow}, + 'DBAction': {'operations': update_operations}, + 'DBGroup': {'workflow': update_workflow}, + 'DBVistrail': {'annotations': update_annotations, + 'actionAnnotations': \ + update_actionAnnotations, + 'DBVistrailVariable': \ + update_vistrail_variable + } + + } + vistrail = DBVistrail() + id_scope = vistrail.idScope + vistrail = DBVistrail.update_version(_vistrail, translate_dict, vistrail) + + vistrail.db_version = '1.0.4' + return vistrail + +def translateWorkflow(_workflow): + global id_scope + def update_workflow(old_obj, translate_dict): + return DBWorkflow.update_version(old_obj.db_workflow, translate_dict) + translate_dict = {'DBGroup': {'workflow': update_workflow}} + + workflow = DBWorkflow() + id_scope = IdScope(remap={DBAbstraction.vtType: DBModule.vtType, DBGroup.vtType: DBModule.vtType}) + workflow = DBWorkflow.update_version(_workflow, translate_dict, workflow) + workflow.db_version = '1.0.4' + return workflow + +def translateLog(_log): + translate_dict = {} + log = DBLog.update_version(_log, translate_dict) + log.db_version = '1.0.4' + return log + +def translateRegistry(_registry): + global id_scope + translate_dict = {} + registry = DBRegistry() + id_scope = registry.idScope + registry = DBRegistry.update_version(_registry, translate_dict, registry) + registry.db_version = '1.0.4' + return registry diff --git a/vistrails/gui/collection/vis_log.py b/vistrails/gui/collection/vis_log.py index bf5efc492..8902a9c7c 100644 
--- a/vistrails/gui/collection/vis_log.py +++ b/vistrails/gui/collection/vis_log.py @@ -52,26 +52,19 @@ class QExecutionItem(QtGui.QTreeWidgetItem): """ - QExecutionListWidget is a widget containing a list of workflow executions. + QExecutionItem represents a workflow or module execution. """ - def __init__(self, execution, parent=None): + def __init__(self, execution, parent=None, prev=None): QtGui.QTreeWidgetItem.__init__(self, parent) self.execution = execution execution.item = self - - # find parent workflow or group - if parent is not None: - while (parent.parent() is not None and - not isinstance(parent.execution, GroupExec)): - parent = parent.parent() - self.wf_execution = parent.execution - else: - self.wf_execution = execution + self.modules = [] + self.wf_item = prev or self if isinstance(execution, WorkflowExec): for item_exec in execution.item_execs: - QExecutionItem(item_exec, self) + QExecutionItem(item_exec, self, self) if execution.completed == -2: brush = CurrentTheme.SUSPENDED_MODULE_BRUSH elif execution.completed == 1: @@ -84,12 +77,13 @@ def __init__(self, execution, parent=None): else: self.setText(0, 'Version #%s' % execution.parent_version ) elif isinstance(execution, ModuleExec): + prev.modules.append(self) for loop_exec in execution.loop_execs: - QExecutionItem(loop_exec, self) + QExecutionItem(loop_exec, self, prev) if execution.completed == 1: if execution.error: brush = CurrentTheme.ERROR_MODULE_BRUSH - self.wf_execution.completed = -1 + self.wf_item.execution.completed = -1 elif execution.cached: brush = CurrentTheme.NOT_EXECUTED_MODULE_BRUSH else: @@ -100,11 +94,15 @@ def __init__(self, execution, parent=None): brush = CurrentTheme.ERROR_MODULE_BRUSH self.setText(0, '%s' % execution.module_name) elif isinstance(execution, GroupExec): + prev.modules.append(self) for item_exec in execution.item_execs: - QExecutionItem(item_exec, self) + if isinstance(item_exec, LoopExec): + QExecutionItem(item_exec, self, prev) + else: + 
QExecutionItem(item_exec, self, self) if execution.completed == 1: if execution.error: - self.wf_execution.completed = -1 + self.wf_item.execution.completed = -1 brush = CurrentTheme.ERROR_MODULE_BRUSH elif execution.cached: brush = CurrentTheme.NOT_EXECUTED_MODULE_BRUSH @@ -117,15 +115,15 @@ def __init__(self, execution, parent=None): self.setText(0, 'Group') elif isinstance(execution, LoopExec): for iteration in execution.loop_iterations: - QExecutionItem(iteration, self) + QExecutionItem(iteration, self, prev) brush = CurrentTheme.MODULE_BRUSH self.setText(0, 'Loop') elif isinstance(execution, LoopIteration): for item_exec in execution.item_execs: - QExecutionItem(item_exec, self) + QExecutionItem(item_exec, self, prev) if execution.completed == 1: if execution.error: - self.wf_execution.completed = -1 + self.wf_item.execution.completed = -1 brush = CurrentTheme.ERROR_MODULE_BRUSH else: brush = CurrentTheme.SUCCESS_MODULE_BRUSH @@ -226,7 +224,7 @@ class QLogDetails(QtGui.QWidget, QVistrailsPaletteInterface): def __init__(self, parent=None): QtGui.QWidget.__init__(self, parent) self.execution = None - self.parentExecution = None + self.parentItem = None self.set_title("Log Details") self.legend = QLegendWidget() self.executionList = QExecutionListWidget() @@ -268,9 +266,9 @@ def addButtonsToToolbar(self): self.openVersionAction) def openVersion(self): - if not hasattr(self.parentExecution, 'item'): + if not hasattr(self.parentItem, 'item'): return - version = self.parentExecution.item.wf_execution.parent_version + version = self.parentItem.item.wf_item.parent_version from vistrails.gui.vistrails_window import _app _app.get_current_view().version_selected(version, True) self.controller.recompute_terse_graph() @@ -296,19 +294,19 @@ def set_execution(self): if self.isDoubling: self.isDoubling = False return - if isinstance(item.wf_execution, GroupExec): + if isinstance(item.wf_item, GroupExec): self.backButton.show() else: self.backButton.hide() - 
self.notify_app(item.wf_execution, item.execution) + self.notify_app(item.wf_item, item.execution) - def notify_app(self, wf_execution, execution): + def notify_app(self, wf_item, execution): # make sure it is only called once if self.isUpdating: return self.isUpdating = True from vistrails.gui.vistrails_window import _app - _app.notify("execution_changed", wf_execution, execution) + _app.notify("execution_changed", wf_item, execution) self.isUpdating = False def set_controller(self, controller): @@ -327,11 +325,11 @@ def set_controller(self, controller): self.log = None self.executionList.set_log(self.log) - def execution_changed(self, wf_execution, execution): + def execution_changed(self, wf_item, execution): if not execution: return self.execution = execution - self.parentExecution = wf_execution + self.parentItem = wf_item text = '' if hasattr(execution, 'item') and \ not execution.item == self.executionList.currentItem(): @@ -344,8 +342,9 @@ def execution_changed(self, wf_execution, execution): text += 'User: %s\n' % execution.user if hasattr(execution, 'cached'): text += 'Cached: %s\n' % ("Yes" if execution.cached else 'No') - text += 'Completed: %s\n' % {'0':'No', '1':'Yes'}.get( - str(execution.completed), 'No') + if hasattr(execution, 'completed'): + text += 'Completed: %s\n' % {'0':'No', '1':'Yes'}.get( + str(execution.completed), 'No') if hasattr(execution, 'error') and execution.error: text += 'Error: %s\n' % execution.error annotations = execution.db_annotations \ @@ -361,16 +360,16 @@ def singleClick(self, item, col): if self.isDoubling: self.isDoubling = False return - if isinstance(item.wf_execution, GroupExec): + if isinstance(item.wf_item, GroupExec): self.backButton.show() else: self.backButton.hide() - self.notify_app(item.wf_execution, item.execution) + self.notify_app(item.wf_item, item.execution) def doubleClick(self, item, col): # only difference here is that we should show contents of GroupExecs self.isDoubling = True - if 
isinstance(item.wf_execution, GroupExec): + if isinstance(item.wf_item, GroupExec): self.backButton.show() else: self.backButton.hide() @@ -378,13 +377,13 @@ def doubleClick(self, item, col): # use itself as the workflow self.notify_app(item.execution, item.execution) else: - self.notify_app(item.wf_execution, item.execution) + self.notify_app(item.wf_item, item.execution) def goBack(self): - if not isinstance(self.parentExecution, GroupExec): + if not isinstance(self.parentItem.execution, GroupExec): self.backButton.hide() - self.notify_app(self.parentExecution.item.wf_execution, - self.parentExecution) + self.notify_app(self.parentItem.item.wf_item, + self.parentItem) def update_selection(self): if hasattr(self.execution, 'item') and \ @@ -398,10 +397,8 @@ def __init__(self, parent=None): self.set_title("Provenance") self.log = None self.execution = None - self.parentExecution = None + self.parentItem = None self.isUpdating = False - # self.exec_to_wf_map = {} - # self.workflow_execs = [] # Hook shape selecting functions self.connect(self.scene(), QtCore.SIGNAL("moduleSelected"), self.moduleSelected) @@ -419,19 +416,18 @@ def set_action_defaults(self): 'publishPaper': [('setEnabled', False, False)], }) - def notify_app(self, wf_execution, execution): + def notify_app(self, wf_item, execution): # make sure it is only called once if self.isUpdating: return self.isUpdating = True from vistrails.gui.vistrails_window import _app - _app.notify("execution_changed", wf_execution, execution) + _app.notify("execution_changed", wf_item, execution) self.isUpdating = False def set_controller(self, controller): QPipelineView.set_controller(self, controller) - #print "@@@ set_controller called", id(self.controller), len(self.controller.vistrail.actions) if not hasattr(self.controller, 'loaded_workflow_execs'): self.controller.loaded_workflow_execs = {} for e in self.controller.read_log().workflow_execs: @@ -450,18 +446,17 @@ def moduleSelected(self, id, selectedItems): """ 
moduleSelected(id: int, selectedItems: [QGraphicsItem]) -> None """ if len(selectedItems)!=1 or id==-1: - if self.execution != self.parentExecution: - self.notify_app(self.parentExecution, self.parentExecution) -# self.moduleUnselected() + if self.execution != self.parentItem.execution: + self.notify_app(self.parentItem, self.parentItem.execution) return item = selectedItems[0] if hasattr(item,'execution') and item.execution: if self.execution != item.execution: item = self.scene().selectedItems()[0] - self.notify_app(self.parentExecution, item.execution) - elif self.execution != self.parentExecution: - self.notify_app(self.parentExecution, self.parentExecution) + self.notify_app(self.parentItem, item.execution) + elif self.execution != self.parentItem.execution: + self.notify_app(self.parentItem, self.parentItem.execution) def set_exec_by_id(self, exec_id): if not self.log: @@ -472,7 +467,7 @@ def set_exec_by_id(self, exec_id): except ValueError: return False if len(workflow_execs): - self.notify_app(workflow_execs[0], workflow_execs[0]) + self.notify_app(workflow_execs[0].item, workflow_execs[0]) return True return False @@ -482,7 +477,7 @@ def set_exec_by_date(self, exec_date): workflow_execs = [e for e in self.log if str(e.ts_start) == str(exec_date)] if len(workflow_execs): - self.notify_app(workflow_execs[0], workflow_execs[0]) + self.notify_app(workflow_execs[0].item, workflow_execs[0]) return True return False @@ -494,46 +489,33 @@ def get_execution_pipeline(self, execution): return self.controller.vistrail.getPipeline(version) if isinstance(execution, GroupExec): - parent = execution.item.wf_execution + parent = execution.item.wf_item.execution parent_pipeline = self.get_execution_pipeline(parent) return parent_pipeline.db_get_module_by_id( execution.db_module_id).pipeline - def execution_changed(self, wf_execution, execution): + def execution_changed(self, wf_item, execution): self.execution = execution - if self.parentExecution != wf_execution: - 
self.parentExecution = wf_execution - self.pipeline = self.get_execution_pipeline(wf_execution) + if self.parentItem != wf_item: + self.parentItem = wf_item + self.pipeline = self.get_execution_pipeline(wf_item.execution) self.update_pipeline() self.update_selection() - # if idx < len(self.workflow_execs) and idx >= 0: - # self.execution = self.workflow_execs[idx] - # else: - # self.execution = None - - # self.currentItem = self.workflow_execs[idx] - # self.execution = item.execution - # self.workflowExecution = item - # while self.workflowExecution.parent(): - # self.workflowExecution = self.workflowExecution.parent() - # self.workflowExecution = self.workflowExecution.execution - # self.parentExecution = item - # while self.parentExecution.execution.__class__ not in \ - # [WorkflowExec, LoopExec, GroupExec]: - # self.parentExecution = self.parentExecution.parent() - # self.parentExecution = self.parentExecution.execution - # self.showExecution() - def update_pipeline(self): - #print "ACTIONS!" 
- #print "#### controller", id(self.controller) scene = self.scene() scene.clearItems() self.pipeline.validate(False) - module_execs = dict([(e.module_id, e) - for e in self.parentExecution.item_execs]) + modules = [(e.execution.module_id, e.execution) for e in self.parentItem.modules + if hasattr(e.execution, 'module_id')] + modules.reverse() + module_execs = {} + for id, m in modules: + if id not in module_execs: + module_execs[id] = [] + module_execs[id].append(m) + # controller = DummyController(self.pipeline) scene.controller = self.controller self.moduleItems = {} @@ -541,7 +523,7 @@ def update_pipeline(self): module = self.pipeline.modules[m_id] brush = CurrentTheme.PERSISTENT_MODULE_BRUSH if m_id in module_execs: - e = module_execs[m_id] + e = module_execs[m_id][-1] if e.completed == 1: if e.error: brush = CurrentTheme.ERROR_MODULE_BRUSH @@ -558,9 +540,9 @@ def update_pipeline(self): item.controller = self.controller self.moduleItems[m_id] = item if m_id in module_execs: - e = module_execs[m_id] - item.execution = e - e.module = item + for e in module_execs[m_id]: + item.execution = e + e.module = item else: item.execution = None connectionItems = [] @@ -581,8 +563,8 @@ def update_selection(self): self.isUpdating = True module = None if (isinstance(self.execution, ModuleExec) or \ - (isinstance(self.execution, GroupExec) and - self.execution == self.parentExecution)) and \ + isinstance(self.execution, GroupExec)) and \ + hasattr(self.execution, 'module') and \ not self.execution.module.isSelected(): self.execution.module.setSelected(True) module = self.execution.module diff --git a/vistrails/gui/job_monitor.py b/vistrails/gui/job_monitor.py index 30f2e235e..9298987ec 100644 --- a/vistrails/gui/job_monitor.py +++ b/vistrails/gui/job_monitor.py @@ -179,10 +179,12 @@ def startWorkflow(self, workflow): """ def addJobRec(self, obj, parent_id=None): - """Recursively adds jobs. + """Recursively adds jobs that are executed by other modules like + Groups and Maps. 
This is only for display purposes. """ workflow = self.jobMonitor.currentWorkflow() workflowItem = self.workflowItems[workflow.id] + # top down. Base is assumed to have been added already base = (workflowItem.intermediates[parent_id] if parent_id is not None else workflowItem) id = obj.signature diff --git a/vistrails/gui/module_iteration.py b/vistrails/gui/module_iteration.py new file mode 100644 index 000000000..54081b900 --- /dev/null +++ b/vistrails/gui/module_iteration.py @@ -0,0 +1,523 @@ +############################################################################### + +## +## Copyright (C) 2011-2014, NYU-Poly. +## Copyright (C) 2006-2011, University of Utah. +## All rights reserved. +## Contact: contact@vistrails.org +## +## This file is part of VisTrails. +## +## "Redistribution and use in source and binary forms, with or without +## modification, are permitted provided that the following conditions are met: +## +## - Redistributions of source code must retain the above copyright notice, +## this list of conditions and the following disclaimer. +## - Redistributions in binary form must reproduce the above copyright +## notice, this list of conditions and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## - Neither the name of the University of Utah nor the names of its +## contributors may be used to endorse or promote products derived from +## this software without specific prior written permission. +## +## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +## PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." +## +############################################################################### +""" This file contains a dialog for editing options for how the given + VisTrails module is looped. + +QModuleIteration +""" +from PyQt4 import QtCore, QtGui +from vistrails.core.modules.vistrails_module import LOOP_KEY, \ + WHILE_COND_KEY, WHILE_INPUT_KEY, WHILE_OUTPUT_KEY, WHILE_MAX_KEY, \ + WHILE_DELAY_KEY +from vistrails.gui.theme import CurrentTheme +from vistrails.gui.vistrails_palette import QVistrailsPaletteInterface + +import json +import unittest + +############################################################################### + +class QModuleIteration(QtGui.QDialog, QVistrailsPaletteInterface): + """ + QModuleIteration is a dialog for editing module looping options. 
+ + """ + def __init__(self, parent=None): + """ + QModuleIteration(parent) + -> None + + """ + QtGui.QDialog.__init__(self, parent) + self.setWindowTitle("Module Looping") + self.createButtons() + self.update_module() + + def createButtons(self): + """ createButtons() -> None + Create and connect signals to Ok & Cancel button + + """ + self.controller = None + self.state_changed = False + self.module = None + self.setLayout(QtGui.QVBoxLayout()) + # self.layout().addStrut() + layout = QtGui.QHBoxLayout() + type_group = QtGui.QButtonGroup(self) # Number group + layout.addWidget(QtGui.QLabel("Port list combination method:")) + self.pairwiseButton = QtGui.QRadioButton("Pairwise") + self.pairwiseButton.setToolTip("Execute multiple looped input ports pairwise:" + " [(A, B), (C, D)] -> [(A, C), (B, D)]") + type_group.addButton(self.pairwiseButton) + layout.addWidget(self.pairwiseButton) + layout.setStretch(0, 0) + self.cartesianButton = QtGui.QRadioButton("Cartesian") + self.cartesianButton.setToolTip("Execute multiple looped input ports using cartesian product:" + " [(A, B), (C, D)] -> [(A, C), (A, D), (B, C), (B, D)]") + self.cartesianButton.setChecked(True) + type_group.addButton(self.cartesianButton) + layout.addWidget(self.cartesianButton) + layout.setStretch(1, 0) + self.customButton = QtGui.QRadioButton("Custom") + self.customButton.setToolTip("Build a custom combination using pairwise/cartesian functions") + type_group.addButton(self.customButton) + layout.addWidget(self.customButton) + layout.setStretch(2, 0) + layout.addStretch(1) + self.layout().addLayout(layout) + self.layout().setStretch(0, 0) + + self.portCombiner = QPortCombineTreeWidget() + self.layout().addWidget(self.portCombiner) + self.portCombiner.setVisible(False) + + whileLayout = QtGui.QVBoxLayout() + + self.whileButton = QtGui.QCheckBox("While Loop") + self.whileButton.setToolTip('Repeatedly execute module until a specified output port has a false value') + whileLayout.addWidget(self.whileButton) 
+ whileLayout.setStretch(0, 0) + + layout = QtGui.QHBoxLayout() + self.condLabel = QtGui.QLabel("Condition output port:") + layout.addWidget(self.condLabel) + layout.setStretch(0, 0) + self.condEdit = QtGui.QLineEdit() + self.condEdit.setToolTip('Name of output port containing the condition of the loop') + layout.addWidget(self.condEdit) + layout.setStretch(1, 1) + whileLayout.addLayout(layout) + whileLayout.setStretch(1, 0) + + layout = QtGui.QHBoxLayout() + self.maxLabel = QtGui.QLabel("Max iterations:") + layout.addWidget(self.maxLabel) + layout.setStretch(0, 0) + self.maxEdit = QtGui.QLineEdit() + self.maxEdit.setValidator(QtGui.QIntValidator()) + self.maxEdit.setToolTip('Fail after this number of iterations have been reached (default=20)') + layout.addWidget(self.maxEdit) + layout.setStretch(1, 1) + whileLayout.addLayout(layout) + whileLayout.setStretch(2, 0) + + layout = QtGui.QHBoxLayout() + self.delayLabel = QtGui.QLabel("Delay:") + layout.addWidget(self.delayLabel) + layout.setStretch(0, 0) + self.delayEdit = QtGui.QLineEdit() + self.delayEdit.setValidator(QtGui.QDoubleValidator(self)) + self.delayEdit.setToolTip('Delay between iterations in fractions of seconds') + layout.addWidget(self.delayEdit) + layout.setStretch(1, 1) + whileLayout.addLayout(layout) + whileLayout.setStretch(2, 0) + + layout = QtGui.QHBoxLayout() + self.feedInputLabel = QtGui.QLabel("Feedback Input port:") + layout.addWidget(self.feedInputLabel) + layout.setStretch(0, 0) + self.feedInputEdit = QtGui.QLineEdit() + self.feedInputEdit.setToolTip('Name of input port to feed the value from last iteration') + layout.addWidget(self.feedInputEdit) + layout.setStretch(1, 1) + whileLayout.addLayout(layout) + whileLayout.setStretch(3, 0) + + layout = QtGui.QHBoxLayout() + self.feedOutputLabel = QtGui.QLabel("Feedback Output port:") + layout.addWidget(self.feedOutputLabel) + layout.setStretch(0, 0) + self.feedOutputEdit = QtGui.QLineEdit() + self.feedOutputEdit.setToolTip('Name of output port to 
feed to next iteration') + layout.addWidget(self.feedOutputEdit) + layout.setStretch(1, 1) + whileLayout.addLayout(layout) + whileLayout.setStretch(4, 0) + + whileLayout.addStretch(1) + self.layout().addLayout(whileLayout) + + self.layout().addStretch(1) + self.buttonLayout = QtGui.QHBoxLayout() + self.buttonLayout.setMargin(5) + self.saveButton = QtGui.QPushButton('&Save', self) + self.saveButton.setFixedWidth(100) + self.saveButton.setEnabled(False) + self.buttonLayout.addWidget(self.saveButton) + self.resetButton = QtGui.QPushButton('&Reset', self) + self.resetButton.setFixedWidth(100) + self.resetButton.setEnabled(False) + self.buttonLayout.addWidget(self.resetButton) + self.layout().addLayout(self.buttonLayout) + self.connect(self.saveButton, QtCore.SIGNAL('clicked(bool)'), + self.saveTriggered) + self.connect(self.resetButton, QtCore.SIGNAL('clicked(bool)'), + self.resetTriggered) + self.layout().setStretch(2, 0) + self.update_module() + self.pairwiseButton.toggled.connect(self.stateChanged) + self.cartesianButton.toggled.connect(self.stateChanged) + self.customButton.toggled.connect(self.stateChanged) + self.customButton.toggled.connect(self.customToggled) + self.portCombiner.itemChanged.connect(self.stateChanged) + self.whileButton.toggled.connect(self.stateChanged) + self.whileButton.toggled.connect(self.whileToggled) + self.condEdit.textChanged.connect(self.stateChanged) + self.maxEdit.textChanged.connect(self.stateChanged) + self.delayEdit.textChanged.connect(self.stateChanged) + self.feedInputEdit.textChanged.connect(self.stateChanged) + self.feedOutputEdit.textChanged.connect(self.stateChanged) + + def sizeHint(self): + """ sizeHint() -> QSize + Return the recommended size of the configuration window + + """ + return QtCore.QSize(512, 256) + + def saveTriggered(self, checked = False): + """ saveTriggered(checked: bool) -> None + Update vistrail controller and module when the user click Ok + + """ + if self.updateVistrail(): + 
self.saveButton.setEnabled(False) + self.resetButton.setEnabled(False) + self.state_changed = False + self.emit(QtCore.SIGNAL("stateChanged")) + self.emit(QtCore.SIGNAL('doneConfigure'), self.module.id) + + def resetTriggered(self, checked = False): + self.state_changed = False + self.update_module(self.module) + self.saveButton.setEnabled(False) + self.resetButton.setEnabled(False) + + def stateChanged(self, state=False, other=None): + self.saveButton.setEnabled(True) + self.resetButton.setEnabled(True) + self.state_changed = True + + def customToggled(self, state=False): + self.portCombiner.setVisible(state) + + def whileToggled(self, state=False): + if state: + self.condEdit.setVisible(True) + self.maxEdit.setVisible(True) + self.delayEdit.setVisible(True) + self.feedInputEdit.setVisible(True) + self.feedOutputEdit.setVisible(True) + self.condLabel.setVisible(True) + self.maxLabel.setVisible(True) + self.delayLabel.setVisible(True) + self.feedInputLabel.setVisible(True) + self.feedOutputLabel.setVisible(True) + self.condEdit.setText('') + self.maxEdit.setText('') + self.delayEdit.setText('') + self.feedInputEdit.setText('') + self.feedOutputEdit.setText('') + else: + self.condEdit.setVisible(False) + self.maxEdit.setVisible(False) + self.delayEdit.setVisible(False) + self.feedInputEdit.setVisible(False) + self.feedOutputEdit.setVisible(False) + self.condLabel.setVisible(False) + self.maxLabel.setVisible(False) + self.delayLabel.setVisible(False) + self.feedInputLabel.setVisible(False) + self.feedOutputLabel.setVisible(False) + + def closeEvent(self, event): + self.askToSaveChanges() + event.accept() + + def set_controller(self, controller): + self.controller = controller + if not controller: + return + scene = controller.current_pipeline_scene + selected_ids = scene.get_selected_module_ids() + modules = [controller.current_pipeline.modules[i] + for i in selected_ids] + if len(modules) == 1: + self.update_module(modules[0]) + else: + self.update_module(None) + + 
def update_module(self, module=None): + self.module = module + if not module: + self.pairwiseButton.setEnabled(False) + self.cartesianButton.setEnabled(False) + self.customButton.setEnabled(False) + self.whileButton.setEnabled(False) + self.condEdit.setVisible(False) + self.maxEdit.setVisible(False) + self.delayEdit.setVisible(False) + self.feedInputEdit.setVisible(False) + self.feedOutputEdit.setVisible(False) + self.condLabel.setVisible(False) + self.maxLabel.setVisible(False) + self.delayLabel.setVisible(False) + self.feedInputLabel.setVisible(False) + self.feedOutputLabel.setVisible(False) + self.portCombiner.setVisible(False) + return + # set defaults + self.pairwiseButton.setEnabled(True) + self.cartesianButton.setEnabled(True) + self.cartesianButton.setChecked(True) + self.customButton.setEnabled(True) + + self.whileButton.setEnabled(True) + self.whileButton.setChecked(False) + self.condEdit.setVisible(False) + self.maxEdit.setVisible(False) + self.delayEdit.setVisible(False) + self.feedInputEdit.setVisible(False) + self.feedOutputEdit.setVisible(False) + self.condLabel.setVisible(False) + self.maxLabel.setVisible(False) + self.delayLabel.setVisible(False) + self.feedInputLabel.setVisible(False) + self.feedOutputLabel.setVisible(False) + self.portCombiner.setVisible(False) + self.portCombiner.setDefault(module) + if module.has_control_parameter_with_name(LOOP_KEY): + type = module.get_control_parameter_by_name(LOOP_KEY).value + self.pairwiseButton.setChecked(type=='pairwise') + self.cartesianButton.setChecked(type=='cartesian') + self.customButton.setChecked(type not in ['pairwise', 'cartesian']) + self.portCombiner.setVisible(type not in ['pairwise', 'cartesian']) + if type not in ['pairwise', 'cartesian']: + self.portCombiner.setValue(type) + if module.has_control_parameter_with_name(WHILE_COND_KEY) or \ + module.has_control_parameter_with_name(WHILE_MAX_KEY): + self.whileButton.setChecked(True) + if module.has_control_parameter_with_name(WHILE_COND_KEY): 
+ cond = module.get_control_parameter_by_name(WHILE_COND_KEY).value + self.condEdit.setText(cond) + if module.has_control_parameter_with_name(WHILE_MAX_KEY): + max = module.get_control_parameter_by_name(WHILE_MAX_KEY).value + self.maxEdit.setText(max) + if module.has_control_parameter_with_name(WHILE_DELAY_KEY): + delay = module.get_control_parameter_by_name(WHILE_DELAY_KEY).value + self.delayEdit.setText(delay) + if module.has_control_parameter_with_name(WHILE_INPUT_KEY): + input = module.get_control_parameter_by_name(WHILE_INPUT_KEY).value + self.feedInputEdit.setText(input) + if module.has_control_parameter_with_name(WHILE_OUTPUT_KEY): + output = module.get_control_parameter_by_name(WHILE_OUTPUT_KEY).value + self.feedOutputEdit.setText(output) + + def updateVistrail(self): + values = [] + if self.pairwiseButton.isChecked(): + value = 'pairwise' + elif self.cartesianButton.isChecked(): + value = 'cartesian' + else: + value = self.portCombiner.getValue() + values.append((LOOP_KEY, value)) + _while = self.whileButton.isChecked() + values.append((WHILE_COND_KEY, _while and self.condEdit.text())) + values.append((WHILE_MAX_KEY, _while and self.maxEdit.text())) + values.append((WHILE_DELAY_KEY, _while and self.delayEdit.text())) + values.append((WHILE_INPUT_KEY, _while and self.feedInputEdit.text())) + values.append((WHILE_OUTPUT_KEY,_while and self.feedOutputEdit.text())) + for name, value in values: + if value: + if not self.module.has_control_parameter_with_name(name) or \ + value != \ + self.module.get_control_parameter_by_name(name).value: + if self.module.has_control_parameter_with_name(name): + self.controller.delete_control_parameter(name, + self.module.id) + self.controller.add_control_parameter((name, value), + self.module.id) + elif self.module.has_control_parameter_with_name(name): + self.controller.delete_control_parameter(name, self.module.id) + return True + + def activate(self): + if self.isVisible() == False: + self.show() + self.activateWindow() + 
+PORTITEM = 1000 +DOTITEM = 1001 +CROSSITEM = 1002 +class PortItem(QtGui.QTreeWidgetItem): + def __init__(self, port_name, parent=None): + QtGui.QTreeWidgetItem.__init__(self, parent, PORTITEM) + self.setText(0, port_name) + self.setFlags(self.flags() & ~QtCore.Qt.ItemIsDropEnabled) + +class DotItem(QtGui.QTreeWidgetItem): + def __init__(self, parent=None): + QtGui.QTreeWidgetItem.__init__(self, parent, DOTITEM) + self.setExpanded(True) + self.setIcon(0, CurrentTheme.DOT_PRODUCT_ICON) + self.setText(0, 'Dot') + +class CrossItem(QtGui.QTreeWidgetItem): + def __init__(self, parent=None): + QtGui.QTreeWidgetItem.__init__(self, parent, CROSSITEM) + self.setExpanded(True) + self.setIcon(0, CurrentTheme.CROSS_PRODUCT_ICON) + self.setText(0, 'Cross') + +class QPortCombineTreeWidget(QtGui.QTreeWidget): + def __init__(self, parent=None): + QtGui.QTreeWidget.__init__(self, parent) + self.setDragDropMode(QtGui.QAbstractItemView.InternalMove) + self.header().hide() + self.setExpandsOnDoubleClick(False) + self.setItemsExpandable(False) + self.setRootIsDecorated(False) + self.expandAll() + self.setToolTip("Right-click to add dot/cross product. Rearrange " + "items to get suitable order. 
'Del' key deletes " + "selected product.") + + def dropEvent(self, event): + QtGui.QTreeWidget.dropEvent(self, event) + self.expandAll() + + def loadNode(self, parent, node): + # populate widget from json struct + if isinstance(node, basestring): + PortItem(node, parent) + else: + item = DotItem(parent) if node[0] == 'pairwise' \ + else CrossItem(parent) + for i in node[1:]: + self.loadNode(item, i) + + def saveNode(self, item): + # populate json struct from widget items + if item.type()==PORTITEM: + return item.text(0) + L = ['pairwise'] if item.type()==DOTITEM else ['cartesian'] + L.extend([self.saveNode(item.child(i)) + for i in xrange(item.childCount())]) + L = [i for i in L if i is not None] + if len(L)<2: + L = None + return L + + def setValue(self, value): + self.clear() + value = json.loads(value) + for v in value[1:]: + self.loadNode(self.invisibleRootItem(), v) + + def getValue(self): + nodes = [self.topLevelItem(i) + for i in xrange(self.topLevelItemCount())] + L = ['cartesian'] # default + L.extend([self.saveNode(node) for node in nodes]) + L = [i for i in L if i is not None] + if len(L)<2: + L = None + return json.dumps(L) + + def getPorts(self, node, ports=None): + if ports is None: + ports = [] + # extract ports in json struct + if isinstance(node, basestring): + ports.append(node) + else: + [self.getPorts(i, ports) for i in [node[1:]]] + return ports + + def setDefault(self, module): + self.clear() + if not module: + return + for port_name in module.iterated_ports: + PortItem(port_name, self) + + def contextMenuEvent(self, event): + menu = QtGui.QMenu() + dotAction = QtGui.QAction(CurrentTheme.DOT_PRODUCT_ICON, + 'Add Pairwise Product', self) + dotAction.triggered.connect(self.addDot) + menu.addAction(dotAction) + crossAction = QtGui.QAction(CurrentTheme.CROSS_PRODUCT_ICON, + 'Add Cartesian Product', self) + crossAction.triggered.connect(self.addCross) + menu.addAction(crossAction) + menu.exec_(event.globalPos()) + event.accept() + + def 
addDot(self): + DotItem(self) + + def addCross(self): + CrossItem(self) + + def keyPressEvent(self, event): + """ keyPressEvent(event: QKeyEvent) -> None + Capture 'Del', 'Backspace' for deleting items. + Ctrl+C, Ctrl+V, Ctrl+A for copy, paste and select all + + """ + items = self.selectedItems() + if (len(items)==1 and \ + event.key() in [QtCore.Qt.Key_Backspace, QtCore.Qt.Key_Delete]) and \ + type(items[0]) in [DotItem, CrossItem] and\ + not items[0].childCount(): + item = items[0] + if item.parent(): + item.parent().takeChild(item.parent().indexOfChild(item)) + else: + self.takeTopLevelItem(self.indexOfTopLevelItem(item)) + else: + QtGui.QTreeWidget.keyPressEvent(self, event) + +class TestIterationGui(unittest.TestCase): + def testGetSet(self): + p = QPortCombineTreeWidget() + v = '["cartesian", ["pairwise", "a", "b"], "c"]' + p.setValue(v) + self.assertEqual(v, p.getValue()) diff --git a/vistrails/gui/modules/source_configure.py b/vistrails/gui/modules/source_configure.py index 1fec8d944..d27126c1d 100644 --- a/vistrails/gui/modules/source_configure.py +++ b/vistrails/gui/modules/source_configure.py @@ -87,13 +87,13 @@ def __init__(self, module, controller, editor_class=None, def createPortTable(self, has_inputs=True, has_outputs=True): if has_inputs: self.inputPortTable = PortTable(self) - labels = ["Input Port Name", "Type"] + labels = ["Input Port Name", "Type", "List Depth"] self.inputPortTable.setHorizontalHeaderLabels(labels) self.inputPortTable.initializePorts(self.module.input_port_specs) self.layout().addWidget(self.inputPortTable) if has_outputs: self.outputPortTable = PortTable(self) - labels = ["Output Port Name", "Type"] + labels = ["Output Port Name", "Type", "List Depth"] self.outputPortTable.setHorizontalHeaderLabels(labels) self.outputPortTable.initializePorts(self.module.output_port_specs, True) @@ -197,21 +197,23 @@ def __init__(self, module, controller, editor_class=None, def createPortTable(self, has_inputs=True, has_outputs=True): if 
has_inputs: - self.inputPortTable = QtGui.QTableWidget(1, 2, self) - labels = ["Input Port Name", "Type"] + self.inputPortTable = QtGui.QTableWidget(1, 3, self) + labels = ["Input Port Name", "Type", "List Depth"] self.inputPortTable.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive) self.inputPortTable.horizontalHeader().setMovable(False) - self.inputPortTable.horizontalHeader().setStretchLastSection(True) + #self.inputPortTable.horizontalHeader().setStretchLastSection(True) + self.inputPortTable.horizontalHeader().setResizeMode(1, self.inputPortTable.horizontalHeader().Stretch) self.inputPortTable.setHorizontalHeaderLabels(labels) self.initializePorts(self.inputPortTable, self.module.input_port_specs) self.layout().addWidget(self.inputPortTable) if has_outputs: - self.outputPortTable = QtGui.QTableWidget(1, 2, self) - labels = ["Output Port Name", "Type"] + self.outputPortTable = QtGui.QTableWidget(1, 3, self) + labels = ["Output Port Name", "Type", "List Depth"] self.outputPortTable.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive) self.outputPortTable.horizontalHeader().setMovable(False) - self.outputPortTable.horizontalHeader().setStretchLastSection(True) + #self.outputPortTable.horizontalHeader().setStretchLastSection(True) + self.outputPortTable.horizontalHeader().setResizeMode(1, self.outputPortTable.horizontalHeader().Stretch) self.outputPortTable.setHorizontalHeaderLabels(labels) self.initializePorts(self.outputPortTable, @@ -235,12 +237,16 @@ def initializePorts(self, table, port_specs, reverse_order=False): sigstring = p.sigstring[1:-1] siglist = sigstring.split(':') short_name = "%s (%s)" % (siglist[1], siglist[0]) + item = QtGui.QTableWidgetItem(p.name) item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEnabled) table.setItem(row, 0, item) item = QtGui.QTableWidgetItem(short_name) item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEnabled) table.setItem(row, 1, item) + item = QtGui.QTableWidgetItem(str(p.depth)) 
+ item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEnabled) + table.setItem(row, 2, item) table.setRowCount(table.rowCount()+1) def fixTableGeometry(self, table): diff --git a/vistrails/gui/modules/tuple_configuration.py b/vistrails/gui/modules/tuple_configuration.py index d9fab1a65..163600681 100644 --- a/vistrails/gui/modules/tuple_configuration.py +++ b/vistrails/gui/modules/tuple_configuration.py @@ -53,10 +53,12 @@ class PortTable(QtGui.QTableWidget): def __init__(self, parent=None): - QtGui.QTableWidget.__init__(self,1,2,parent) - self.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive) - self.horizontalHeader().setMovable(False) - self.horizontalHeader().setStretchLastSection(True) + QtGui.QTableWidget.__init__(self,1,3,parent) + horiz = self.horizontalHeader() + horiz.setResizeMode(QtGui.QHeaderView.Interactive) + horiz.setMovable(False) + #horiz.setStretchLastSection(True) + horiz.setResizeMode(1, horiz.Stretch) self.setSelectionMode(QtGui.QAbstractItemView.NoSelection) self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) self.delegate = PortTableItemDelegate(self) @@ -107,6 +109,9 @@ def initializePorts(self, port_specs, reverse_order=False): sigstring = p.sigstring[1:-1] siglist = sigstring.split(':') short_name = "%s (%s)" % (siglist[1], siglist[0]) + model.setData(model.index(self.rowCount()-1, 2), + p.depth, + QtCore.Qt.DisplayRole) model.setData(model.index(self.rowCount()-1, 1), sigstring, QtCore.Qt.UserRole) @@ -125,12 +130,11 @@ def getPorts(self): ports = [] model = self.model() for i in xrange(self.rowCount()): - name = model.data(model.index(i, 0), - QtCore.Qt.DisplayRole) - sigstring = model.data(model.index(i, 1), - QtCore.Qt.UserRole) + name = model.data(model.index(i, 0), QtCore.Qt.DisplayRole) + sigstring = model.data(model.index(i, 1), QtCore.Qt.UserRole) + depth = model.data(model.index(i, 2), QtCore.Qt.DisplayRole) or 0 if name is not None and sigstring is not None: - ports.append((name, '(%s)' % sigstring, 
i)) + ports.append((name, '(%s)' % sigstring, i, depth)) return ports # def focusOutEvent(self, event): @@ -172,7 +176,11 @@ class PortTableItemDelegate(QtGui.QItemDelegate): def createEditor(self, parent, option, index): registry = get_module_registry() - if index.column()==1: #Port type + if index.column()==2: #Depth type + spinbox = QtGui.QSpinBox(parent) + spinbox.setValue(0) + return spinbox + elif index.column()==1: #Port type combo = CompletingComboBox(parent) # FIXME just use descriptors here!! variant_desc = registry.get_descriptor_by_name( @@ -199,14 +207,19 @@ def createEditor(self, parent, option, index): return QtGui.QItemDelegate.createEditor(self, parent, option, index) def setEditorData(self, editor, index): - if index.column()==1: + if index.column()==2: + data = index.model().data(index, QtCore.Qt.DisplayRole) + editor.setValue(data or 0) + elif index.column()==1: data = index.model().data(index, QtCore.Qt.UserRole) editor.setCurrentIndex(editor.findData(data)) else: QtGui.QItemDelegate.setEditorData(self, editor, index) def setModelData(self, editor, model, index): - if index.column()==1: + if index.column()==2: + model.setData(index, editor.value() or 0, QtCore.Qt.DisplayRole) + elif index.column()==1: editor.validate_input() model.setData(index, editor.itemData(editor.currentIndex()), QtCore.Qt.UserRole) @@ -343,10 +356,10 @@ def registryChanges(self, old_ports, new_ports): def getPortDiff(self, p_type, port_table): if p_type == 'input': - old_ports = [(p.name, p.sigstring, p.sort_key) + old_ports = [(p.name, p.sigstring, p.sort_key, p.depth) for p in self.module.input_port_specs] elif p_type == 'output': - old_ports = [(p.name, p.sigstring, p.sort_key) + old_ports = [(p.name, p.sigstring, p.sort_key, p.depth) for p in self.module.output_port_specs] else: old_ports = [] @@ -389,7 +402,7 @@ def __init__(self, module, controller, parent=None): # Then add a PortTable to our configuration widget self.portTable = PortTable(self) 
self.portTable.setHorizontalHeaderLabels( - ['Input Port Name', 'Type']) + ['Input Port Name', 'Type', 'List Depth']) # We know that the Tuple module initially doesn't have any # input port, we just use the local registry to see what ports diff --git a/vistrails/gui/pipeline_view.py b/vistrails/gui/pipeline_view.py index 933fff89c..6c5256129 100644 --- a/vistrails/gui/pipeline_view.py +++ b/vistrails/gui/pipeline_view.py @@ -613,6 +613,7 @@ def contextMenuEvent(self, event): menu.addAction(self.annotateAct) menu.addAction(self.viewDocumentationAct) menu.addAction(self.changeModuleLabelAct) + menu.addAction(self.editLoopingAct) menu.addAction(self.setBreakpointAct) menu.addAction(self.setWatchedAct) menu.addAction(self.runModuleAct) @@ -641,6 +642,11 @@ def createActions(self): QtCore.QObject.connect(self.viewDocumentationAct, QtCore.SIGNAL("triggered()"), self.viewDocumentation) + self.editLoopingAct = QtGui.QAction("Looping Options", self.scene()) + self.editLoopingAct.setStatusTip("Edit looping options") + QtCore.QObject.connect(self.editLoopingAct, + QtCore.SIGNAL("triggered()"), + self.editLooping) self.changeModuleLabelAct = QtGui.QAction("Set Module Label...", self.scene()) self.changeModuleLabelAct.setStatusTip("Set or remove module label") QtCore.QObject.connect(self.changeModuleLabelAct, @@ -718,6 +724,13 @@ def viewDocumentation(self): assert self.moduleId >= 0 self.scene().open_documentation_window(self.moduleId) + def editLooping(self): + """ editLooping() -> None + Show the looping options for the module + """ + assert self.moduleId >= 0 + self.scene().open_looping_window(self.moduleId) + def changeModuleLabel(self): """ changeModuleLabel() -> None Show the module label configuration widget @@ -824,6 +837,8 @@ def __init__(self, Create the shape, initialize its pen and brush accordingly """ + self.srcPortItem = srcPortItem + self.dstPortItem = dstPortItem path = self.create_path(srcPortItem.getPosition(), dstPortItem.getPosition()) 
QtGui.QGraphicsPathItem.__init__(self, path, parent) @@ -831,8 +846,6 @@ def __init__(self, # Bump it slightly higher than the highest module self.setZValue(max(srcModule.id, dstModule.id) + 0.1) - self.srcPortItem = srcPortItem - self.dstPortItem = dstPortItem self.connectionPen = CurrentTheme.CONNECTION_PEN self.connectingModules = (srcModule, dstModule) self.ghosted = False @@ -868,8 +881,9 @@ def paint(self, painter, option, widget=None): painter.setPen(self.connectionPen) painter.drawPath(self.path()) - def setupConnection(self, startPos, endPos): - path = self.create_path(startPos, endPos) + def setupConnection(self, startPos=None, endPos=None): + path = self.create_path(startPos or self.startPos, + endPos or self.endPos) self.setPath(path) def create_path(self, startPos, endPos): @@ -936,8 +950,27 @@ def create_path(self, startPos, endPos): # self._control_2 = endPos - displacement - path = QtGui.QPainterPath(self.startPos) - path.cubicTo(self._control_1, self._control_2, self.endPos) + # draw multiple connections depending on list depth + + def diff(i, depth): + return QtCore.QPointF((5.0 + 10.0*i)/depth - 5.0, 0.0) + + startDepth = self.srcPortItem.parentItem().module.list_depth + 1 + endDepth = self.dstPortItem.parentItem().module.list_depth + 1 + starts = [diff(i, startDepth) for i in xrange(startDepth)] + ends = [diff(i, endDepth) for i in xrange(endDepth)] + + first = True + for start in starts: + for end in ends: + if first: + path = QtGui.QPainterPath(self.startPos + start) + first = False + else: + path.moveTo(self.startPos + start) + path.cubicTo(self._control_1, self._control_2, + self.endPos + end) + return path def itemChange(self, change, value): @@ -1940,7 +1973,10 @@ def addConnection(self, connection, connectionBrush=None): if srcModule.module.is_vistrail_var(): connectionItem.hide() var_uuid = srcModule.module.get_vistrail_var() + dstPortItem.addVistrailVar( + self.controller.get_vistrail_variable_by_uuid(var_uuid)) 
dstPortItem.addVistrailVar(var_uuid) + self.update_connections() return connectionItem def selected_subgraph(self): @@ -2000,6 +2036,7 @@ def remove_connection(self, c_id): self.removeItem(self.connections[c_id]) del self.connections[c_id] self._old_connection_ids.remove(c_id) + self.update_connections() def recreate_module(self, pipeline, m_id): @@ -2047,7 +2084,9 @@ def setupScene(self, pipeline): # clear things self.clear() if not pipeline: return - + + self.pipeline.mark_list_depth() + needReset = len(self.items())==0 try: new_modules = set(pipeline.modules) @@ -2128,6 +2167,7 @@ def setupScene(self, pipeline): self.reset_module_colors() for m_id in selected_modules: self.modules[m_id].setSelected(True) + except ModuleRegistryException, e: import traceback traceback.print_exc() @@ -2544,6 +2584,15 @@ def add_tmp_module(self, desc): return self.tmp_module_item + def update_connections(self): + for module_id, list_depth in \ + self.controller.current_pipeline.mark_list_depth(): + if module_id in self.modules: + self.modules[module_id].module.list_depth = list_depth + for c in self.connections.itervalues(): + c.setupConnection() + + def delete_tmp_module(self): if self.tmp_module_item is not None: self.removeItem(self.tmp_module_item) @@ -2859,6 +2908,13 @@ def open_documentation_window(self, id): from vistrails.gui.vistrails_window import _app _app.show_documentation() + def open_looping_window(self, id): + """ open_looping_window(int) -> None + Opens the modal module looping options window for module with given id + """ + from vistrails.gui.vistrails_window import _app + _app.show_looping_options() + def toggle_breakpoint(self, id): """ toggle_breakpoint(int) -> None Toggles the breakpoint attribute for the module with given id diff --git a/vistrails/gui/theme.py b/vistrails/gui/theme.py index b65e0cfde..6e015c2dc 100644 --- a/vistrails/gui/theme.py +++ b/vistrails/gui/theme.py @@ -627,6 +627,15 @@ def __init__(self): 
vistrails.core.system.vistrails_root_directory() + '/gui/resources/images/multiline_string_icon.png')) + # icons for the port list combination modes + self.DOT_PRODUCT_ICON = QtGui.QIcon(QtGui.QPixmap( + vistrails.core.system.vistrails_root_directory() + + '/gui/resources/images/macro.png')) + + self.CROSS_PRODUCT_ICON = QtGui.QIcon(QtGui.QPixmap( + vistrails.core.system.vistrails_root_directory() + + '/gui/resources/images/remove_param.png')) + #### COLORS #### # Color for the PIP frame self.PIP_FRAME_COLOR = QtGui.QColor( diff --git a/vistrails/gui/vis_diff.py b/vistrails/gui/vis_diff.py index 831402a0a..43804cfce 100644 --- a/vistrails/gui/vis_diff.py +++ b/vistrails/gui/vis_diff.py @@ -292,7 +292,8 @@ def __init__(self, parent=None): g_layout.addWidget(self.legend) legend_group.setLayout(g_layout) layout.addWidget(legend_group) - + layout.setStretch(0,0) + layout.addStretch(1) self.params = QParamTable() params_group = QtGui.QGroupBox("Parameter Changes") g_layout = QtGui.QVBoxLayout() @@ -301,6 +302,28 @@ def __init__(self, parent=None): g_layout.addWidget(self.params) params_group.setLayout(g_layout) layout.addWidget(params_group) + layout.setStretch(2,1000) + + self.cparams = QParamTable() + params_group = QtGui.QGroupBox("Control Parameter Changes") + g_layout = QtGui.QVBoxLayout() + g_layout.setMargin(0) + g_layout.setSpacing(0) + g_layout.addWidget(self.cparams) + params_group.setLayout(g_layout) + layout.addWidget(params_group) + layout.setStretch(3,1000) + + self.annotations = QParamTable() + params_group = QtGui.QGroupBox("Annotation Changes") + g_layout = QtGui.QVBoxLayout() + g_layout.setMargin(0) + g_layout.setSpacing(0) + g_layout.addWidget(self.annotations) + params_group.setLayout(g_layout) + layout.addWidget(params_group) + layout.setStretch(4,1000) + self.setLayout(layout) + self.addButtonsToToolbar() @@ -341,8 +364,6 @@ def set_diff(self): return ((vistrail_a, version_a), (vistrail_b, version_b)) = \ self.controller.current_diff_versions - (p1,
p2, v1Andv2, heuristicMatch, v1Only, v2Only, paramChanged) = \ - self.controller.current_diff # Set up the version name correctly v1_name = vistrail_a.getVersionName(version_a) @@ -367,6 +388,9 @@ def set_diff(self): self.legend.set_names(v1_name, v2_name) self.params.set_names(v1_name, v2_name) + self.cparams.set_names(v1_name, v2_name) + self.annotations.set_names(v1_name, v2_name) + self.update_module() def set_controller(self, controller=None): self.controller = controller @@ -381,11 +405,16 @@ def update_module(self, module=None): """ if module is None or not hasattr(self.controller, 'current_diff'): self.params.model().clearList() + self.params.parent().setVisible(False) + self.cparams.model().clearList() + self.cparams.parent().setVisible(False) + self.annotations.model().clearList() + self.annotations.parent().setVisible(False) return - # Interprete the diff result and setup item models - (p1, p2, v1Andv2, heuristicMatch, v1Only, v2Only, paramChanged) = \ - self.controller.current_diff + # Interpret the diff result and setup item models + (p1, p2, v1Andv2, heuristicMatch, v1Only, v2Only, paramChanged, + cparamChanged, annotChanged) = self.controller.current_diff # # Set the window title # if id>self.maxId1: @@ -397,15 +426,18 @@ def update_module(self, module=None): # FIXME set the module name/package info? 
- # Clear the old inspector - param_model = self.params.model() - # annotations = self.inspector.annotationsTab.model() - param_model.clearList() - # annotations.clearList() + to_text = lambda x:'%s(%s)' % (x[0], ','.join(v[1] for v in x[1])) + self.setTable(module, paramChanged, self.params, to_text) + to_text = lambda x:'%s(%s)' % (x[0], x[1]) + self.setTable(module, cparamChanged, self.cparams, to_text) + self.setTable(module, annotChanged, self.annotations, to_text) + def setTable(self, module, changed, table, to_text): # Find the parameter changed module + model = table.model() + model.clearList() matching = None - for ((m1id, m2id), paramMatching) in paramChanged: + for ((m1id, m2id), paramMatching) in changed: if m1id == module.id: #print "found match" matching = paramMatching @@ -414,26 +446,24 @@ def update_module(self, module=None): #print "matching:", matching # If the module has no parameter changed, just display nothing if not matching: + table.parent().setVisible(False) return + table.parent().setVisible(True) + # Else just layout the diff on a table - param_model.insertRows(0,len(matching)) + model.insertRows(0,len(matching)) currentRow = 0 for (f1, f2) in matching: if f1[0]!=None: - param_model.setData( - param_model.index(currentRow, 0), - '%s(%s)' % (f1[0], ','.join(v[1] for v in f1[1]))) + model.setData(model.index(currentRow, 0), to_text(f1)) if f2[0]!=None: - param_model.setData( - param_model.index(currentRow, 1), - '%s(%s)' % (f2[0], ','.join(v[1] for v in f2[1]))) + model.setData(model.index(currentRow, 1), to_text(f2)) if f1==f2: - param_model.disableRow(currentRow) + model.disableRow(currentRow) currentRow += 1 - self.params.resizeRowsToContents() - # self.inspector.annotationsTab.resizeRowsToContents() + table.resizeRowsToContents() class QDiffView(QPipelineView): def __init__(self, parent=None): @@ -523,8 +553,8 @@ def set_diff(self, version_a, version_b, vistrail_b=None): self.set_diff_version_names() self.diff = 
vistrails.core.db.io.get_workflow_diff(*self.diff_versions) # self.controller.vistrail.get_pipeline_diff(version_a, version_b) - (p1, p2, v1Andv2, heuristicMatch, v1Only, v2Only, paramChanged) = \ - self.diff + (p1, p2, v1Andv2, heuristicMatch, v1Only, v2Only, paramChanged, + cparamChanged, annotChanged) = self.diff # print " $$$ v1Andv2:", v1Andv2 # print " $$$ heuristicMatch:", heuristicMatch # print " $$$ v1Only", v1Only @@ -637,10 +667,11 @@ def set_diff(self, version_a, version_b, vistrail_b=None): p_both.add_module(copy.copy(p1.modules[m1id])) # Then add parameter changed version - for ((m1id, m2id), matching) in paramChanged: + inChanged = set([m for (m, matching) + in chain(paramChanged, cparamChanged, annotChanged)]) + for (m1id, m2id) in inChanged: m1 = p1.modules[m1id] m2 = p2.modules[m2id] - sum1_x += p1.modules[m1id].location.x sum1_y += p1.modules[m1id].location.y sum2_x += p2.modules[m2id].location.x @@ -723,7 +754,7 @@ def set_diff(self, version_a, version_b, vistrail_b=None): for (m1id, m2id) in heuristicMatch: v1Tov2[m1id] = m2id v2Tov1[m2id] = m1id - for ((m1id, m2id), matching) in paramChanged: + for (m1id, m2id) in inChanged: v1Tov2[m1id] = m2id v2Tov1[m2id] = m1id diff --git a/vistrails/gui/vistrails_window.py b/vistrails/gui/vistrails_window.py index dcfc4d2a1..b43d179a6 100644 --- a/vistrails/gui/vistrails_window.py +++ b/vistrails/gui/vistrails_window.py @@ -1022,6 +1022,7 @@ def init_palettes(self): from vistrails.gui.debugger import QDebugger from vistrails.gui.module_configuration import QModuleConfiguration from vistrails.gui.module_documentation import QModuleDocumentation + from vistrails.gui.module_iteration import QModuleIteration from vistrails.gui.module_palette import QModulePalette from vistrails.gui.module_info import QModuleInfo from vistrails.gui.paramexplore.param_view import QParameterView @@ -1086,6 +1087,9 @@ def init_palettes(self): (('controller_changed', 'set_controller'), ('module_changed', 'update_module'), 
('descriptor_changed', 'update_descriptor'))), + ((QModuleIteration, True), + (('controller_changed', 'set_controller'), + ('module_changed', 'update_module'))), ((QShellDialog, True), (('controller_changed', 'set_controller'),)), ((QDebugger, True), @@ -2437,6 +2441,13 @@ def show_documentation(self): self.qactions[action_name].setChecked(False) self.qactions[action_name].setChecked(True) + def show_looping_options(self): + from vistrails.gui.module_iteration import QModuleIteration + action_name = QModuleIteration.instance().get_title() + # easy way to make sure that looping options window is raised + self.qactions[action_name].setChecked(False) + self.qactions[action_name].setChecked(True) + # def show_group(self): # class DummyController(object): # def __init__(self, pip): diff --git a/vistrails/packages/controlflow/fold.py b/vistrails/packages/controlflow/fold.py index 16b03bb59..4a7576dc1 100644 --- a/vistrails/packages/controlflow/fold.py +++ b/vistrails/packages/controlflow/fold.py @@ -33,6 +33,7 @@ ## ############################################################################### from vistrails.core import debug +from vistrails.core.modules.basic_modules import create_constant, get_module from vistrails.core.modules.vistrails_module import Module, ModuleError, \ ModuleConnector, InvalidOutput, ModuleSuspended, ModuleWasSuspended from vistrails.core.modules.basic_modules import Boolean, String, Integer, \ @@ -173,7 +174,7 @@ def updateFunctionPort(self): module.upToDate = False module.computed = False - self.setInputValues(module, nameInput, element) + self.setInputValues(module, nameInput, element, i) loop.begin_iteration(module, i) @@ -205,69 +206,6 @@ def updateFunctionPort(self): children=suspended) loop.end_loop_execution() - def setInputValues(self, module, inputPorts, elementList): - """ - Function used to set a value inside 'module', given the input port(s). 
- """ - for element, inputPort in izip(elementList, inputPorts): - ## Cleaning the previous connector... - if inputPort in module.inputPorts: - del module.inputPorts[inputPort] - new_connector = ModuleConnector(create_constant(element), 'value') - module.set_input_port(inputPort, new_connector) - - def typeChecking(self, module, inputPorts, inputList): - """ - Function used to check if the types of the input list element and of the - inputPort of 'module' match. - """ - for elementList in inputList: - if len(elementList) != len(inputPorts): - raise ModuleError(self, - 'The number of input values and input ports ' - 'are not the same.') - for element, inputPort in izip(elementList, inputPorts): - p_modules = module.moduleInfo['pipeline'].modules - p_module = p_modules[module.moduleInfo['moduleId']] - port_spec = p_module.get_port_spec(inputPort, 'input') - v_module = get_module(element, port_spec.signature) - if v_module is not None: - if not self.compare(port_spec, v_module, inputPort): - raise ModuleError(self, - 'The type of a list element does ' - 'not match with the type of the ' - 'port %s.' % inputPort) - - del v_module - else: - break - - def createSignature(self, v_module): - """ - ` Function used to create a signature, given v_module, for a port spec. - """ - if isinstance(v_module, tuple): - v_module_class = [] - for module_ in v_module: - v_module_class.append(self.createSignature(module_)) - return v_module_class - else: - return v_module - - def compare(self, port_spec, v_module, port): - """ - Function used to compare two port specs. 
- """ - port_spec1 = port_spec - - reg = get_module_registry() - - v_module = self.createSignature(v_module) - port_spec2 = PortSpec(**{'signature': v_module}) - matched = reg.are_specs_matched(port_spec2, port_spec1) - - return matched - def compute(self): """The compute method for the Fold.""" @@ -278,48 +216,3 @@ def compute(self): self.updateFunctionPort() self.set_output('Result', self.partialResult) - -############################################################################### - -class NewConstant(Constant): - """ - A new Constant module to be used inside the FoldWithModule module. - """ - def setValue(self, v): - self.set_output("value", v) - self.upToDate = True - -def create_constant(value): - """ - Creates a NewConstant module, to be used for the ModuleConnector. - """ - constant = NewConstant() - constant.setValue(value) - return constant - -def get_module(value, signature): - """ - Creates a module for value, in order to do the type checking. - """ - if isinstance(value, Constant): - return type(value) - elif isinstance(value, bool): - return Boolean - elif isinstance(value, str): - return String - elif isinstance(value, int): - return Integer - elif isinstance(value, float): - return Float - elif isinstance(value, list): - return List - elif isinstance(value, tuple): - v_modules = () - for element in xrange(len(value)): - v_modules += (get_module(value[element], signature[element]),) - return v_modules - else: # pragma: no cover - debug.warning("Could not identify the type of the list element.") - debug.warning("Type checking is not going to be done inside" - "FoldWithModule module.") - return None diff --git a/vistrails/packages/controlflow/looping.py b/vistrails/packages/controlflow/looping.py index 6a7206a4f..93693fe3c 100644 --- a/vistrails/packages/controlflow/looping.py +++ b/vistrails/packages/controlflow/looping.py @@ -1,12 +1,21 @@ +from base64 import b16encode, b16decode import copy from itertools import izip import time from 
vistrails.core.modules.vistrails_module import Module, InvalidOutput, \ ModuleError, ModuleConnector, ModuleSuspended, ModuleWasSuspended +from vistrails.core.utils import xor, long2bytes from fold import create_constant +try: + import hashlib + sha1_hash = hashlib.sha1 +except ImportError: + import sha + sha1_hash = sha.new + class While(Module): """ @@ -103,6 +112,12 @@ def compute(self): create_constant(value), 'value') module.set_input_port(port, new_connector) + # Affix a fake signature on the module + inputPort_hash = sha1_hash() + inputPort_hash.update(port) + module.signature = b16encode(xor( + b16decode(self.signature.upper()), + inputPort_hash.digest())) loop.begin_iteration(module, i) @@ -211,6 +226,13 @@ def compute(self): create_constant(i), 'value') module.set_input_port(name_input, new_connector) + # Affix a fake signature on the module + inputPort_hash = sha1_hash() + inputPort_hash.update(name_input) + module.signature = b16encode(xor( + b16decode(self.signature.upper()), + long2bytes(i, 20), + inputPort_hash.digest())) loop.begin_iteration(module, i) diff --git a/vistrails/packages/parallelflow/map.py b/vistrails/packages/parallelflow/map.py index 7b4a3e4bd..8bdb5ea03 100644 --- a/vistrails/packages/parallelflow/map.py +++ b/vistrails/packages/parallelflow/map.py @@ -18,8 +18,10 @@ from vistrails.core.log.module_exec import ModuleExec from vistrails.core.log.group_exec import GroupExec from vistrails.core.log.machine import Machine +from vistrails.core.utils import xor, long2bytes from vistrails.db.domain import IdScope +from base64 import b16encode, b16decode import copy import inspect from itertools import izip @@ -32,6 +34,13 @@ from .api import get_client +try: + import hashlib + sha1_hash = hashlib.sha1 +except ImportError: + import sha + sha1_hash = sha.new + ############################################################################### # This function is sent to the engines which execute it @@ -229,7 +238,7 @@ def updateFunctionPort(self): 
# checking type and setting input in the module self.typeChecking(connector.obj, nameInput, inputList) - self.setInputValues(connector.obj, nameInput, element) + self.setInputValues(connector.obj, nameInput, element, i) pipeline_db_module = original_pipeline.modules[module_id].do_copy() @@ -458,7 +467,7 @@ def process_group(group): return serialize(pipeline) - def setInputValues(self, module, inputPorts, elementList): + def setInputValues(self, module, inputPorts, elementList, iteration): """ Function used to set a value inside 'module', given the input port(s). """ @@ -468,6 +477,13 @@ def setInputValues(self, module, inputPorts, elementList): del module.inputPorts[inputPort] new_connector = ModuleConnector(create_constant(element), 'value') module.set_input_port(inputPort, new_connector) + # Affix a fake signature on the module + inputPort_hash = sha1_hash() + inputPort_hash.update(inputPort) + module.signature = b16encode(xor( + b16decode(self.signature.upper()), + long2bytes(iteration, 20), + inputPort_hash.digest())) def typeChecking(self, module, inputPorts, inputList): """ diff --git a/vistrails/tests/resources/test-implicit-while.vt b/vistrails/tests/resources/test-implicit-while.vt new file mode 100644 index 000000000..e405014c6 Binary files /dev/null and b/vistrails/tests/resources/test-implicit-while.vt differ diff --git a/vistrails/tests/resources/test-list-custom.vt b/vistrails/tests/resources/test-list-custom.vt new file mode 100644 index 000000000..f94082f5b Binary files /dev/null and b/vistrails/tests/resources/test-list-custom.vt differ diff --git a/vistrails/tests/resources/test-streaming.vt b/vistrails/tests/resources/test-streaming.vt new file mode 100644 index 000000000..ee6b74e9a Binary files /dev/null and b/vistrails/tests/resources/test-streaming.vt differ