diff --git a/odmtools/controller/frmDataTable.py b/odmtools/controller/frmDataTable.py index ce6f008..be6ccbe 100644 --- a/odmtools/controller/frmDataTable.py +++ b/odmtools/controller/frmDataTable.py @@ -13,17 +13,14 @@ def __init__(self, parent, **kwargs): self.memDB = None DataTable.__init__(self, parent, **kwargs) + def init(self, memDB): + self.memDB = memDB + self.olvDataTable.init(self.memDB) - def init_publishers(self): Publisher.subscribe(self.onChangeSelection, "changeTableSelection") Publisher.subscribe(self.onRefresh, "refreshTable") Publisher.subscribe(self.olvDataTable.onDeselectAll, "deselectAllDataTable") - def init(self, memDB): - self.memDB = memDB - self.olvDataTable.init(self.memDB) - self.init_publishers() - def onItemSelected(self, event): pass diff --git a/odmtools/controller/olvDataTable.py b/odmtools/controller/olvDataTable.py index 112f4d7..1b4ff44 100644 --- a/odmtools/controller/olvDataTable.py +++ b/odmtools/controller/olvDataTable.py @@ -20,6 +20,8 @@ def __init__(self, parent, **kwargs): self.sortedColumnIndex = -1 self.currentItem = None self.dataframe = None + self.annotations = None + self.annotations_grouped = {} def init(self, memDB): self.memDB = memDB @@ -28,21 +30,62 @@ def init(self, memDB): self.oddRowsBackColor = wx.Colour(191, 217, 217) self.dataframe = self.memDB.getDataValuesDF() - sort_by_index = list(self.dataframe.columns).index("valuedatetime") + self.annotations = self.memDB.get_annotations() + + sort_by_index = self.dataframe.columns.tolist().index("valuedatetime") self.dataframe.sort_values(self.dataframe.columns[sort_by_index], inplace=True) - self.dataObjects = self.dataframe.values.tolist() + + self.annotations_grouped = self.__group_annotations() + self.dataObjects = self.__merge_dataframe_with_annotations() + + col = self.memDB.get_columns_with_annotations() + columns = \ [ColumnDefn(x.strip(), align="left", valueGetter=i, minimumWidth=125, width=125, - stringConverter='%Y-%m-%d %H:%M:%S' if "valuedatetime" == x.lower() else '%s') - for x, i in self.memDB.getEditColumns()] + stringConverter='%Y-%m-%d %H:%M:%S' if "valuedatetime" == x.lower() else '%s') + for x, i in col] + self.SetColumns(columns) self.SetObjectGetter(self.ObjectGetter) - self.SetItemCount(len(self.dataframe)) + self.SetItemCount(len(self.dataObjects)) + + def __merge_dataframe_with_annotations(self): + data_list = self.dataframe.values.tolist() + data = data_list + if self.annotations_grouped: + for key, value in self.annotations_grouped.iteritems(): + for i in range(0, len(data_list)): + if key in data[i]: + data[i].append(value) + break + + return data + + def __group_annotations(self): + """ + Ideally, this method should only be called once. 
Use self.annotations_grouped after calling this method
+        :return:
+        """
+        if self.annotations is not None and not self.annotations.empty:
+            anno_list = self.annotations.values.tolist()
+
+            anno = {}
+            for i in range(0, len(anno_list)):
+                value_id = anno_list[i][1]
+                annotation_code = anno_list[i][-1]
+                if value_id in anno:
+                    anno[value_id].append(annotation_code)
+                else:
+                    anno[value_id] = [annotation_code]
+
+            return anno
+        else:
+            return None

     def EnableSorting(self):
-        self.Bind(wx.EVT_LIST_COL_CLICK, self.onColSelected)
+        self.Bind(wx.EVT_LIST_COL_CLICK, self.on_column_selected)
         if not self.smallImageList:
             self.SetImageLists()
         if (not self.smallImageList.HasName(ObjectListView.NAME_DOWN_IMAGE) and
@@ -56,40 +99,58 @@ def ObjectGetter(self, index):
         """
         return self.dataObjects[index % len(self.dataObjects)]

-    def onColSelected(self, evt):
+    def on_column_selected(self, event):
         """
             Allows users to sort by clicking on columns
         """
-        if isinstance(self.dataframe, pd.DataFrame):
-            if self.dataframe.empty:
-                return
-        else:
-            if not self.dataframe:
-                return
+        if not isinstance(self.dataframe, pd.DataFrame):
+            return
+
+        if self.dataframe.empty:
+            return
+
+        if not len(self.dataObjects):
+            return

-        logger.debug("Column: %s" % evt.m_col)
-        self.sortColumn(evt.m_col)
+        self.sortColumn(event.Column)

     def sortColumn(self, selected_column):
+        self.sortAscending = not self.sortAscending
+
         oldSortColumnIndex = self.sortedColumnIndex
         self.sortedColumnIndex = selected_column
-        ascending = self.sortAscending
-        if ascending:
-            self.dataframe.sort_values(self.dataframe.columns[selected_column], inplace=True)
-            self.sortAscending = False
-        elif not ascending:
-            self.dataframe.sort_values(self.dataframe.columns[selected_column], ascending=False, inplace=True)
-            self.sortAscending = True

         self._UpdateColumnSortIndicators(selected_column, oldSortColumnIndex)

-        self.dataObjects = self.dataframe.values.tolist()
-        if self.GetItemCount:
+        if selected_column >= len(self.dataframe.columns):
+            self.dataObjects = self.sort_columns_by_annotation_code(reverse=self.sortAscending)
+        else:
+            self.dataframe.sort_values(self.dataframe.columns[selected_column], ascending=self.sortAscending, inplace=True)
+            self.dataObjects = self.__merge_dataframe_with_annotations()
+
+        if self.GetItemCount():
             itemFrom = self.GetTopItem()
             itemTo = self.GetTopItem() + 1 + self.GetCountPerPage()
             itemTo = min(itemTo, self.GetItemCount() - 1)
             self.RefreshItems(itemFrom, itemTo)

+    def sort_columns_by_annotation_code(self, reverse=False):
+        rows_with_annotation = []
+        rows_without_annotation = []
+
+        column_number_of_dataframe = len(self.dataframe.columns)
+
+        for i in self.dataObjects:
+            if len(i) > column_number_of_dataframe:
+                rows_with_annotation.append(i)
+            else:
+                rows_without_annotation.append(i)
+
+        if reverse:
+            return rows_without_annotation + rows_with_annotation
+        else:
+            return rows_with_annotation + rows_without_annotation
+
     def onItemSelected(self, event):
         """
diff --git a/odmtools/gui/mnuRibbon.py b/odmtools/gui/mnuRibbon.py
index 6c21874..eede1aa 100644
--- a/odmtools/gui/mnuRibbon.py
+++ b/odmtools/gui/mnuRibbon.py
@@ -129,6 +129,7 @@ def _init_ctrls(self, prnt):
         # -------------------------------------------------------------------------------

         editPage = RB.RibbonPage(self, wx.ID_ANY, "Edit")
+        # editPage.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)
         main_panel = RB.RibbonPanel(editPage, wx.ID_ANY, "Main", wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize,
                                     RB.RIBBON_PANEL_NO_AUTO_MINIMISE)

@@ -204,14 +205,28 @@ def _init_ctrls(self, prnt):
         self.CurrPage = 1
         self.SetActivePageByIndex(self.CurrPage)

-        self.bindEvents()
+        self.__bind_events()
         self.initPubSub()

     def __init__(self, parent, id, name):
         self.parent = parent
         self._init_ctrls(parent)

-    def bindEvents(self):
+    def on_mouse_enter(self, event):
+        ribbon_panel = event.GetEventObject().GetParent()
+        ribbon_panel._hovered = True
+
+        self.Refresh()
+        event.Skip()
+
+    def on_mouse_leave(self, event):
+        ribbon_panel = event.GetEventObject().GetParent()
+        ribbon_panel._hovered = False
+
+        self.Refresh()
+        event.Skip()
+
+    def __bind_events(self):
         ###Docking Window Selection
         self.Bind(RB.EVT_RIBBONBUTTONBAR_CLICKED, self.onDocking, id=wxID_RIBBONVIEWTABLE)
         self.Bind(RB.EVT_RIBBONBUTTONBAR_CLICKED, self.onDocking, id=wxID_RIBBONVIEWSERIES)
@@ -262,6 +277,24 @@ def bindEvents(self):
         ###Ribbon Event
         self.Bind(RB.EVT_RIBBONBAR_PAGE_CHANGED, self.onFileMenu, id=wxID_PANEL1)

+        # ENTER
+        self.main_bar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 1
+        self.edit_bar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 2
+        self.record_bar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 3
+        self.PlotsOptions_bar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 4
+        self.plots_bar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 5
+        self.dateTime_buttonbar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 6
+        self.scriptBar.Bind(wx.EVT_ENTER_WINDOW, self.on_mouse_enter)  # 7
+
+        # LEAVE
+        self.main_bar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 1
+        self.edit_bar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 2
+        self.record_bar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 3
+        self.PlotsOptions_bar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 4
+        self.plots_bar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 5
+        self.dateTime_buttonbar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 6
+        self.scriptBar.Bind(wx.EVT_LEAVE_WINDOW, self.on_mouse_leave)  # 7
+
     def initPubSub(self):
         Publisher.subscribe(self.toggleEditButtons, "EnableEditButtons")
         Publisher.subscribe(self.enableButtons, "EnablePlotButtons")
diff --git a/odmtools/gui/wizSave.py b/odmtools/gui/wizSave.py
index 102a81f..a057ad2 100644
--- a/odmtools/gui/wizSave.py
+++ b/odmtools/gui/wizSave.py
@@ -189,18 +189,19 @@ def on_wizard_finished(self, event):
         site, variable, method, action, proc_level = self.get_metadata()
         # if qcl exists, use its id
         closeSuccessful = False
-
-        rbSave = self.pgIntro.pnlIntroduction.rbSave.GetValue()
-        rbSaveAsNew = self.pgIntro.pnlIntroduction.rbSaveAs.GetValue()
-        rbSaveAsExisting = self.pgIntro.pnlIntroduction.rbSaveExisting.GetValue()
-        if rbSaveAsExisting:
-            append = self.pgExisting.pnlExisting.rbAppend.GetValue()
-            overwrite = self.pgExisting.pnlExisting.rbOverwrite.GetValue()
-            if append:
-                original = self.pgExisting.pnlExisting.rbOriginal.GetValue()
-                new = self.pgExisting.pnlExisting.rbNew.GetValue()
-
-        if proc_level.ProcessingLevelID == 0 and not rbSaveAsNew:
+        saveSuccessful = False
+
+        self.rbSave = self.pgIntro.pnlIntroduction.rbSave.GetValue()
+        self.rbSaveAsNew = self.pgIntro.pnlIntroduction.rbSaveAs.GetValue()
+        self.rbSaveAsExisting = self.pgIntro.pnlIntroduction.rbSaveExisting.GetValue()
+        if self.rbSaveAsExisting:
+            self.append = self.pgExisting.pnlExisting.rbAppend.GetValue()
+            self.overwrite = self.pgExisting.pnlExisting.rbOverwrite.GetValue()
+            if self.append:
+                self.original = self.pgExisting.pnlExisting.rbOriginal.GetValue()
+                self.new = self.pgExisting.pnlExisting.rbNew.GetValue()
+
+        if proc_level.ProcessingLevelID == 0 and not self.rbSaveAsNew:
             """ If we're looking at a QCL with Control level
0 and the following cases: Save @@ -212,19 +213,19 @@ def on_wizard_finished(self, event): wx.YES_NO | wx.ICON_QUESTION) if val == 2: logger.info("User selected yes to save a level 0 dataset") - val_2 = wx.MessageBox("This interactive_item cannot be undone.\nAre you sure you are sure?\n", + val_2 = wx.MessageBox("This cannot be undone.\nAre you sure you are sure?\n", 'Are you REALLY sure?', wx.YES_NO | wx.ICON_QUESTION) if val_2 == 2: closeSuccessful = True - elif rbSaveAsExisting: + elif self.rbSaveAsExisting: keyword = "overwrite" if self.pgExisting.pnlExisting.rbAppend.GetValue(): keyword = "append to" - message = "You are about to " + keyword + " an existing series_service,\nthis interactive_item cannot be undone.\nWould you like to continue?\n" + message = "You are about to " + keyword + " an existing series_service,\nthis cannot be undone.\nWould you like to continue?\n" cont = wx.MessageBox(message, 'Are you sure?', wx.YES_NO | wx.ICON_QUESTION) if cont == 2: closeSuccessful = True @@ -234,95 +235,80 @@ def on_wizard_finished(self, event): closeSuccessful = True if closeSuccessful: - #if qcl exists use its id - # if self.series_service.qcl_exists(QCL): - # if QCL == self.currSeries.quality_control_level: - # QCL = None - # else: - # QCL = self.record_service.get_qcl(QCL) - # else: - # QCL = self.record_service.create_processing_level(QCL.code, QCL.definition, QCL.explanation) - if self.series_service.get_processing_level_by_code(proc_level.ProcessingLevelCode) is None: - proc_level = self.series_service.create_processing_level(proc_level.ProcessingLevelCode, proc_level.Definition, proc_level.Explanation) - elif proc_level.ProcessingLevelCode == self.__processing_level_from_series.ProcessingLevelCode: - proc_level = None - else: - proc_level = self.series_service.get_processing_level_by_code(proc_level.ProcessingLevelCode) - - - #if variable exists use its id - # if self.series_service.variable_exists(Variable): - # Variable = self.record_service.get_variable(Variable) - # else: - # Variable = self.record_service.create_variable(Variable) - if self.series_service.get_variable_by_code(variable.VariableCode) is None: - variable = self.series_service.create_variable_by_var(variable) - else: - variable = self.series_service.get_variable_by_code(variable.VariableCode) - - - #if method exists use its id - # if self.series_service.method_exists(Method): - # if Method == self.currSeries.method: - # Method = None - # else: - # Method = self.record_service.get_method(Method) - # else: - # Method = self.record_service.create_method(Method) - if self.series_service.get_method_by_code(method.MethodCode) is None: - method = self.series_service.create_method(method.MethodDescription, method.MethodLink) - elif method == self.__method_from_series: - method = None - else: - method = self.series_service.get_method_by_code(method.MethodCode) - - # initiate either "Save as" or "Save" - ''' - if self.page1.pnlIntroduction.rbSave.GetValue(): - result = self.record_service.save(Variable, Method, QCL, False) - else: - result = self.record_service.saveAs(Variable, Method, QCL, True) - ''' - affiliation = self.action_page.get_affiliation() - - action_by = ActionBy() - #action_by.ActionID = action.ActionID - action_by.RoleDescription = self.action_page.action_view.role_description_text_box.GetValue() - action_by.AffiliationID = affiliation.AffiliationID - action_by.AffiliationObj = affiliation - - # result = self.series_service.getResult(var=variable, meth=method, proc=proc_level, action=action, 
actionby=action_by)
-            result = self.pgExisting.pnlExisting.olvSeriesList.GetSelectedObject().ResultObj
-
-            #result = self.record_service._edit_service.getResult(var=variable, meth=method, proc=proc_level, action=action, actionby=action_by)
-
             try:
-                if rbSave:
-                    result = self.record_service.save()
-                elif rbSaveAsNew:
-                    result = self.record_service.save_as(variable=variable, method=method, proc_level=proc_level,
-                                                         action=action, action_by=action_by)
-                elif rbSaveAsExisting:
-                    if overwrite:
-                        result = self.record_service.save_existing(result=result)
-                    elif append:
-                        #TODO send in just the result
-                        #def save_appending(self, var = None, method =None, qcl = None, overwrite = False):
-                        #TODO if i require that original or new is selected I can call once with overwrite = original
-                        if original:
-                            result = self.record_service.save_appending(result=result, overwrite=False)
-                        elif new:
-                            result = self.record_service.save_appending(result=result, overwrite=True)
-
-                Publisher.sendMessage("refreshSeries")
-
-                #self.page1.pnlIntroduction.rb
+                saveSuccessful = self.try_to_save(variable, method, proc_level, action)
             except Exception as e:
                 message = "Save was unsuccessful %s" % e.message
                 logger.error(message)
                 wx.MessageBox(message, "Error!", wx.ICON_ERROR | wx.ICON_EXCLAMATION)
+                saveSuccessful = False
+
+            if saveSuccessful:
                 event.Skip()
                 self.Close()
+                self.Destroy()
+
+    def create_needed_meta(self, proc_level, variable, method):
+        if self.series_service.get_processing_level_by_code(proc_level.ProcessingLevelCode) is None:
+            proc_level = self.series_service.create_processing_level(proc_level.ProcessingLevelCode, proc_level.Definition, proc_level.Explanation)
+        elif proc_level.ProcessingLevelCode == self.__processing_level_from_series.ProcessingLevelCode:
+            proc_level = None
+        else:
+            proc_level = self.series_service.get_processing_level_by_code(proc_level.ProcessingLevelCode)
+
+        if self.series_service.get_variable_by_code(variable.VariableCode) is None:
+            variable = self.series_service.create_variable_by_var(variable)
+        else:
+            variable = self.series_service.get_variable_by_code(variable.VariableCode)
+
+        if self.series_service.get_method_by_code(method.MethodCode) is None:
+            method = self.series_service.create_method(method.MethodDescription, method.MethodLink)
+        elif method == self.__method_from_series:
+            method = None
+        else:
+            method = self.series_service.get_method_by_code(method.MethodCode)
+
+        # hand the (possibly newly created) metadata back to the caller;
+        # without this the reassignments above are lost
+        return proc_level, variable, method
+
+    def try_to_save(self, variable, method, proc_level, action):
+        proc_level, variable, method = self.create_needed_meta(proc_level, variable, method)
+        affiliation = self.action_page.get_affiliation()
+
+        action_by = ActionBy()
+        # action_by.ActionID = action.ActionID
+        action_by.RoleDescription = self.action_page.action_view.role_description_text_box.GetValue()
+        action_by.AffiliationID = affiliation.AffiliationID
+        action_by.AffiliationObj = affiliation
+
+        # result = self.series_service.getResult(var=variable, meth=method, proc=proc_level, action=action, actionby=action_by)
+        # only the "save to existing" path has a selection; guard against None
+        selected = self.pgExisting.pnlExisting.olvSeriesList.GetSelectedObject()
+        result = selected.ResultObj if selected is not None else None
+
+        if self.rbSave:
+            result = self.record_service.save()
+        elif self.rbSaveAsNew:
+            result = self.record_service.save_as(variable=variable, method=method, proc_level=proc_level,
+                                                 action=action, action_by=action_by)
+        elif self.rbSaveAsExisting:
+            if self.overwrite:
+                result = self.record_service.save_existing(result=result)
+            elif self.append:
+                #TODO send in just the result
+                #def save_appending(self, var = None, method =None, qcl = None, overwrite = False):
+                #TODO if i require that original or new is selected I can call once with overwrite = original
+                if self.original:
+                    result = self.record_service.save_appending(result=result, overwrite=False)
+                elif self.new:
+                    result = self.record_service.save_appending(result=result, overwrite=True)
+
+        Publisher.sendMessage("refreshSeries")
+        return True
+
diff --git a/odmtools/odmdata/memory_database.py b/odmtools/odmdata/memory_database.py
index 813fbe9..e5d8c14 100644
--- a/odmtools/odmdata/memory_database.py
+++ b/odmtools/odmdata/memory_database.py
@@ -3,7 +3,6 @@
 from sqlalchemy import bindparam

 from odmtools.common.logger import LoggerTool
-from odmtools.odmservices import SeriesService
 from odmtools.odmservices import ServiceManager, SeriesService
 # from odmtools.odmdata import SeriesService#ODM
@@ -12,11 +11,10 @@
 from odm2api.ODM2.models import setSchema
 import pandas as pd

-
 logger = logging.getLogger('main')

 class MemoryDatabase(object):
-    ### this code should be changed to work with the database abstract layer so that sql queries are not in the code
+    ## this code should be changed to work with the database abstract layer so that sql queries are not in the code
     # series_service is a SeriesService

     def __init__(self, taskserver=None):
@@ -25,6 +23,7 @@ def __init__(self, taskserver=None):
         self.df = None
         # Series_Service handles remote database
         self.series_service = None
+        self.results_annotations = None

         # Memory_service handles in memory database
         sm = ServiceManager()
@@ -43,18 +42,15 @@ def __init__(self, taskserver=None):
         #self.annotation_list = pd.DataFrame()
         # columns = ['ResultID', 'ValueDateTime', 'ValueID', 'AnnotationID']
         #send in engine
-
     def reset_edit(self):
         sm = ServiceManager()
         self.mem_service = sm.get_series_service(conn_string="sqlite:///:memory:")
+        self.annotation_list = pd.DataFrame()
         setSchema(self.mem_service._session_factory.engine)

-
     def set_series_service(self, service):
         self.series_service = service
-
-
     ##############
     # DB Queries
     ##############
@@ -77,6 +73,14 @@ def getDataValuesDF(self):
         logging.debug("done updating memory dataframe")
         return self.df

+    def get_annotations(self, query_db_again=False):
+        # self.mem_service._session.commit()
+        setSchema(self.series_service._session_factory.engine)
+        if self.results_annotations is None or query_db_again:
+            result_id = self.df.resultid[0]
+            annotation = self.series_service.get_annotations_by_result(resultid=result_id)
+            self.results_annotations = annotation
+        return self.results_annotations
+
     def getDataValues(self):
         # TODO: fix me! this commit location is only temporary. should be flushing so that we can restore
         self.mem_service._session.commit()
@@ -94,6 +98,27 @@ def getEditColumns(self):
         return [(x, i) for (i, x) in enumerate(columns)]
         # return [(x, i) for (i, x) in enumerate(self.df.columns)]

+    def get_columns_with_annotations(self):
+        """
+            If self.df or self.results_annotations has not been set yet, return an empty list;
+            otherwise return the edit columns plus the annotation code column.
+        :return:
+        """
+        if self.results_annotations is None or self.df is None:
+            print "self.df and self.results_annotations must be pandas dataframes. Currently they are None"
+            return []
+
+        columns = []
+        columns.extend(self.df.columns.tolist())
+
+        annotation_columns = self.results_annotations.columns.tolist()
+        index = annotation_columns.index("annotationcode")
+        annotation_code_column = annotation_columns[index]
+
+        columns.append(annotation_code_column)
+
+        return [(x, i) for (i, x) in enumerate(columns)]
+
     def getDataValuesforGraph(self, seriesID, noDataValue, startDate=None, endDate=None):
         return self.series_service.get_plot_values(seriesID, noDataValue, startDate, endDate)

@@ -165,11 +190,11 @@ def chunking(self, data):

     def updateFlag(self, ids, value):
-        flags = pd.DataFrame(columns = ['AnnotationID', 'DateTime', 'ResultID', 'ValueID'])
-        flags["DateTime"] = ids
-        flags["AnnotationID"] = value
-        flags["ResultID"] = self.series.ResultID
-        flags["ValueID"] = None
+        flags = pd.DataFrame(columns=['annotationid', 'valuedatetime', 'resultid', 'valueid'])
+        flags["valuedatetime"] = ids
+        flags["annotationid"] = value
+        flags["resultid"] = self.series.ResultID
+        flags["valueid"] = None

         #what if the column already exists
@@ -250,7 +275,7 @@ def initEditValues(self, seriesID):
             logger.debug("Load series from db")
             self.series = self.series_service.get_series(seriesID)
-            self.df = self.series_service.get_values(series_id= seriesID)
+            self.df = self.series_service.get_values(series_id=seriesID)

             self.editLoaded = True
diff --git a/odmtools/odmservices/edit_service.py b/odmtools/odmservices/edit_service.py
index fa65edc..439a694 100644
--- a/odmtools/odmservices/edit_service.py
+++ b/odmtools/odmservices/edit_service.py
@@ -498,193 +498,236 @@ def restore(self):
         self.reset_filter()

     def save(self, result=None):
-        values = self.memDB.getDataValuesDF()
+        try:
+            values = self.memDB.getDataValuesDF()

-        if not result:
-            result = self.memDB.series_service.get_series(series_id = values['resultid'][0])
-        else:
-            values["resultid"] = result.ResultID
-
-        # update result
-        result.ValueCount = 0
-        self.updateResult(result)
-        # upsert values
-        self.memDB.series_service.upsert_values(values)
-        # save new annotations
-        self.add_annotations(self.memDB.annotation_list)
-        return result
+            if not result:
+                result = self.memDB.series_service.get_series(series_id=values['resultid'][0])
+            else:
+                values["resultid"] = result.ResultID
+
+            # update result
+            result.ValueCount = 0
+            self.updateResult(result)
+            # upsert values
+            self.memDB.series_service.upsert_values(values)
+            # save new annotations
+            if len(self.memDB.annotation_list) > 0:
+                self.add_annotations(self.memDB.annotation_list)
+            return result
+        except Exception as e:
+            logger.error("Exception encountered while saving: {}".format(e))
+            raise

     def save_existing(self, result):
         result = self.save(result)
         return result

     def save_appending(self, result, overwrite=True):
-        values = self.memDB.getDataValuesDF()
+        try:
+            values = self.memDB.getDataValuesDF()

-        # get value count
-        vc = result.ValueCount
-        # set in df
-        values["resultid"] = result.ResultID
-
-        # count = overlap calc
-        count = self.overlapcalc(result, values, overwrite)
-        # set value count = res.vc+valuecount-count
-        valuecount = result.ValueCount + vc - count
-        # update result
-        self.updateResult(result, valuecount)
-        # insert values
-        self.memDB.series_service.upsert_values(values)
-        # save new annotations
-        self.add_annotations(self.memDB.annotation_list)
-
-        return result
+            # number of incoming values
+            vc = len(values)
+            # set in df
+            values["resultid"] = result.ResultID
+
+            # count = overlap calc
+            count = self.overlapcalc(result, values, overwrite)
+            # new value count = existing count + incoming values - overlapping values
+            valuecount = result.ValueCount + vc - count
+            # update result
+            self.updateResult(result, valuecount)
+            # insert values
+            self.memDB.series_service.upsert_values(values)
+            # save new annotations
+            if len(self.memDB.annotation_list) > 0:
+                self.add_annotations(self.memDB.annotation_list)
+            return result
+        except Exception as e:
+            logger.error("Exception encountered while appending: {}".format(e))
+            raise

     def save_as(self, variable, method, proc_level, action, action_by):
-        #save as new series
-        values = self.memDB.getDataValuesDF()
-        # get all annotations for series
-        annolist= self.memDB.series_service.get_annotations_by_result(str(values["resultid"][0]))
-        annolist['valueid']=None
-        # create series
-        result = self.getResult(variable, method, proc_level, action, action_by)
+        try:
+            #save as new series
+            values = self.memDB.getDataValuesDF()
+            # get all annotations for series
+            annolist = self.memDB.series_service.get_annotations_by_result(str(values["resultid"][0]))
+            annolist['valueid'] = None

-        # set in df
-        values["resultid"] = result.ResultID
-        # insert values
-        self.memDB.series_service.insert_values(values)
+            # create series
+            result = self.getResult(variable, method, proc_level, action, action_by)

-        # save all annotations
-        frames = [self.memDB.annotation_list, annolist]
-        annolist = pd.concat(frames)
-        self.add_annotations(annolist)
+            # set in df
+            values["valueid"] = None
+            values["resultid"] = result.ResultID
+            # insert values
+            self.memDB.series_service.insert_values(values)

-        return result
+            # combine the new annotations with the existing ones
+            frames = [self.memDB.annotation_list, annolist]
+            annolist = pd.concat(frames)
+            # save all annotations
+            if len(annolist) > 0:
+                self.add_annotations(annolist)
+
+            return result
+        except Exception as e:
+            logger.error("Exception encountered while performing a save as: {}".format(e))
+            raise

     def getResult(self, var, meth, proc, action, action_by):
-        values = self.memDB.getDataValuesDF()
+        result_ids = self.memDB.getDataValuesDF()["resultid"]
+
         # copy old
-        result = self.memDB.series_service.get_series(str(values["resultid"][0]))
+        # what is my original result
+        result = self.memDB.series_service.get_series(str(result_ids[0]))
+        sfid = result.FeatureActionObj.SamplingFeatureID
+        aggcv = result.AggregationStatisticCV
+        itsp = result.IntendedTimeSpacing
+        itspunit = result.IntendedTimeSpacingUnitsID
+        status = result.StatusCV
+        result_type = result.ResultTypeCV
+        units = result.UnitsID
+        medium = result.SampledMediumCV
+
+        self.memDB.series_service._session.expunge(result)

         # change var, meth, proc in df #intend ts, agg stat
         if var:
             result.VariableID = var.VariableID
-            result.VariableObj = var
+
         if proc:
             result.ProcessingLevelID = proc.ProcessingLevelID
-            result.ProcessingLevelObj = proc
-        if meth:
-            action.MethodID = meth.MethodID
-            action.MethodObj = meth.MethodObj
+        result.ResultID = None
+        result.ResultUUID = None
+
+        # if result does not exist
         if not self.memDB.series_service.resultExists(result):
             try:
                 #create Action
-                action.ActionID = None
-                action.ActionTypeCV = "Derivation"
-                self.memDB.series_service.read._session.expunge(action.MethodObj.OrganizationObj)
-                self.memDB.series_service.read._session.expunge(action.MethodObj)
-                action = self.memDB.series_service.create.createAction(action)  # it times out. find out why
-                print action
+                if meth:
+                    method_id = meth.MethodID
+                    # new_action.MethodObj = meth.MethodObj
+                else:
+                    method_id = action.MethodID
+                new_action, action_by = self.memDB.series_service.create_action(
+                    method_id, action.ActionDescription, action.ActionFileLink,
+                    action.BeginDateTime, action.BeginDateTimeUTCOffset, action_by)

-                # create Actionby done
-                action_by.ActionID = action.ActionID
-                action_by = self.memDB.series_service.create.createActionby(action_by)
-                print action_by
+                # create FeatureAction (using current sampling feature id)
+                feature_action = self.memDB.series_service.createFeatureAction(sfid, new_action.ActionID)

-                # create FeatureAction (using current sampling feature id)
-                sampling_feature = result.FeatureActionObj.SamplingFeatureObj
-                self.memDB.series_service.read._session.expunge(result.FeatureActionObj.SamplingFeatureObj)
-
-                feature_action = FeatureActions()
-                feature_action.SamplingFeatureID = sampling_feature.SamplingFeatureID
-                feature_action.ActionID = action.ActionID
-                feature_action.ActionObj = action
-                feature_action.SamplingFeatureObj = sampling_feature
-                feature_action = self.memDB.series_service.create.createFeatureAction(feature_action)
-                print feature_action
-
-                # create TimeSeriesResult - this should also contain all of the stuff for the Result
-                time, offset = self.get_current_time_and_utcoffset()
-
-                result.ResultID = None
-                result.ResultUUID = None
-                result.ValueCount = 0
-                result.FeatureActionID = feature_action.FeatureActionID
-                result.ResultDateTime = time
-                result.ResultDateTimeUTCOffset = offset
-                result.FeatureActionObj= feature_action
-                self.memDB.series_service.read._session.expunge(result.ProcessingLevelObj)
-                self.memDB.series_service.read._session.expunge(result.VariableObj)
-                self.memDB.series_service.read._session.expunge(result)
-
-
-                result = self.memDB.series_service.create.createResult(result)
-                print result
+                if var:
+                    varid = var.VariableID
+                else:
+                    varid = result.VariableID
+                if proc:
+                    procid = proc.ProcessingLevelID
+                else:
+                    procid = result.ProcessingLevelID
+                result = self.memDB.series_service.create_result(varid, procid, feature_action.FeatureActionID,
+                                                                 aggcv, itsp, itspunit, status, result_type, units, medium)
             except Exception as ex:
+                self.memDB.series_service._session.rollback()
                 print ex
-        return self.updateResult(result)
+                raise ex
+        else:
+            # if save_as called me, raise an error that this series already exists
+            import inspect
+            (frame, filename, line_number,
+             function_name, lines, index) = inspect.getouterframes(inspect.currentframe())[1]
+            if function_name == 'save_as':
+                raise Exception("This series already exists, but you have chosen to create a new series")
+            else:
+                # it already exists, so get it
+                result = self.memDB.series_service.get_series_by_meta(result)
+
+        return self.updateResult(result)

-    def updateResult(self, result, valuecount = -10):
+    def updateResult(self, result, valuecount=-10):
         form = "%Y-%m-%d %H:%M:%S"
         # get pd
         values = self.memDB.getDataValuesDF()
         # update count, dates,
         action = result.FeatureActionObj.ActionObj
-        action.BeginDateTime= datetime.datetime.strptime(str(np.min(values['valuedatetime'])), form)
+        action.BeginDateTime = datetime.datetime.strptime(str(np.min(values["valuedatetime"])), form)
         action.EndDateTime = datetime.datetime.strptime(str(np.max(values["valuedatetime"])), form)
-        if valuecount > 0 :
-            result.ValueCount=valuecount
+
+        #TODO how does valuecount change, when do i send it in
+        if valuecount > 0:
+            result.ValueCount = valuecount
         else:
             result.ValueCount = len(values)
-
-        setSchema(self.memDB.series_service._session_factory.engine)
-        self.memDB.series_service.update.updateResult(result.ResultID, result.ValueCount)
-        self.memDB.series_service.update.updateAction(actionID=action.ActionID, begin=action.BeginDateTime, end=action.EndDateTime)
-
+        self.memDB.series_service.update_result(result=result)
+        self.memDB.series_service.update_action(action=action)
         return result

     def overlapcalc(self, result, values, overwrite):
         form = "%Y-%m-%d %H:%M:%S"
-        #is there any overlap
+        # is there any overlap
         dbend = result.FeatureActionObj.ActionObj.EndDateTime
         dfstart = datetime.datetime.strptime(str(np.min(values["valuedatetime"])), form)
-        overlap = dbend>= dfstart
-        #number of overlapping values
-        overlapdf = values[(values["valuedatetime"]<= dfstart) & (values["valuedatetime"]>= dbend)]
-        count =len(overlapdf)
-        #if not overwrite. remove any overlapping values from df
+        overlap = dbend >= dfstart
+        # number of overlapping values: new values that fall inside the range already stored in the database
+        overlapdf = values[(values["valuedatetime"] >= dfstart) & (values["valuedatetime"] <= dbend)]
+        count = len(overlapdf)
+
+        # if not overwrite, remove any overlapping values from df
         if overlap:
             if not overwrite:
-                values = values[values["valuedatetime"] > dbend]
+                # drop the overlapping rows in place, so the caller upserts only the new values
+                values.drop(overlapdf.index, inplace=True)
+            else:
+                # delete overlapping values from the series database
+                count = self.memDB.series_service.delete_values_by_series(values["resultid"].iloc[0], dfstart)
+
+        # return the number of overlapping values
         return count

     def add_annotations(self, annolist):
-        #match up with existing values and get value id
-        #get df with only ValueID and AnnotationID
-        #remove any duplicates
-        #save df to db
-        pass
+        # match up with existing values and get value id
+        print("Adding Annotations")
+        engine = self.memDB.series_service._session_factory.engine
+        q = self.memDB.series_service._session.query(TimeSeriesResultValues) \
+            .filter(TimeSeriesResultValues.ResultID == int(min(annolist["resultid"])))
+        query = q.statement.compile(dialect=engine.dialect)
+        # data = pd.read_sql_query(sql=query, con=self._session_factory.engine,
+        #                          params=query.params)
+        # query = "SELECT ValueID, ResultID, ValueDateTime FROM TimeSeriesResultValues Where ResultID=" + annolist["ResultID"][0]
+        vals = pd.read_sql_query(sql=query, con=engine, params=query.params)
+        # remove any duplicates
+        annolist.drop_duplicates(["resultid", "annotationid", "valuedatetime"], keep='last', inplace=True)
+        newdf = pd.merge(annolist, vals, how='left', on=["resultid", "valuedatetime"], indicator=True)
+        # keep only AnnotationID and ValueID
+        mynewdf = newdf[["valueid_y", "annotationid"]]
+        mynewdf.columns = ["ValueID", "AnnotationID"]
+        # save df to db
+        self.memDB.series_service.add_annotations(mynewdf)

@@ -877,11 +920,3 @@ def reconcile_dates(self, parent_series_id):

         pass

-    def get_current_time_and_utcoffset(self):
-        current_time = datetime.datetime.now()
-        utc_time = datetime.datetime.utcnow()
-
-        difference_in_timezone = current_time - utc_time
-        offset_in_hours = difference_in_timezone.total_seconds() / 3600
-
-        return current_time, offset_in_hours
\ No newline at end of file
diff --git a/odmtools/odmservices/export_service.py b/odmtools/odmservices/export_service.py
index 4b6d1eb..9cbd7a5 100644
--- a/odmtools/odmservices/export_service.py
+++ b/odmtools/odmservices/export_service.py
@@ -14,7 +14,7 @@ def __init__(self, series_service):

     def export_series_data(self, series_id, filename, utc=False, site=False, var=False, offset=False,
                            qual=False, src=False, qcl=False):
-        series =
self._series_service.get_series_by_id(series_id) + series = self._series_service.get_series(series_id) if series is None: return False @@ -144,11 +144,11 @@ def export_series_metadata(self, series_ids, filename): pass if isinstance(series_ids, int): - series = self._series_service.get_series_by_id(series_ids) + series = self._series_service.get_series(series_ids) self.append_series_node(series, list_root) else: for series_id in series_ids: - series = self._series_service.get_series_by_id(series_id) + series = self._series_service.get_series(series_id) self.append_series_node(series, list_root) tree = ET.ElementTree(root) diff --git a/odmtools/odmservices/series_service.py b/odmtools/odmservices/series_service.py index aec7c69..395b55d 100644 --- a/odmtools/odmservices/series_service.py +++ b/odmtools/odmservices/series_service.py @@ -3,11 +3,9 @@ from odm2api.ODM2.services import ReadODM2, UpdateODM2, DeleteODM2, CreateODM2 from odm2api import serviceBase from odm2api.ODM2.models import * -from odmtools.odmservices.to_sql_newrows import get_insert, get_delete, get_update import datetime -from odmtools.common.logger import LoggerTool import pandas as pd -logger =logging.getLogger('main') +logger = logging.getLogger('main') class SeriesService(serviceBase): @@ -19,10 +17,9 @@ def __init__(self, connection, debug=False): self.update = UpdateODM2(self._session_factory) self.delete = DeleteODM2(self._session_factory) self.create = CreateODM2(self._session_factory) - #send in engine + # send in engine setSchema(self._session_factory.engine) - def reset_session(self): self.read.reset_session() self.update.reset_session() @@ -45,9 +42,10 @@ def get_used_sites(self): except: return None - sf=[x[0] for x in self._session.query(distinct(FeatureActions.SamplingFeatureID)).filter(FeatureActions.FeatureActionID.in_(fas)).all()] + sf = [x[0] for x in self._session.query(distinct(FeatureActions.SamplingFeatureID)) + .filter(FeatureActions.FeatureActionID.in_(fas)).all()] - sites = self.read.getSamplingFeatures(type = "site", ids = sf) + sites = self.read.getSamplingFeatures(type="site", ids=sf) return sites def get_used_variables(self): @@ -63,8 +61,6 @@ def get_used_variables(self): vars= self.read.getVariables(ids = ids) return vars - - # Query DetailedResultInfo/series object is for Display purposes def get_all_series(self, siteid = None): """ @@ -74,7 +70,6 @@ def get_all_series(self, siteid = None): setSchema(self._session_factory.engine) - return self.read.getDetailedResultInfo('Time Series Coverage', sfID=siteid) def get_series(self, series_id=None): @@ -116,28 +111,8 @@ def get_variables_by_site_code(self, site_code): q = self._session.query(Variables).filter(Variables.VariableID.in_(var_ids)) return q.all() - # Data Value Methods - def get_values(self, series_id=None): - ''' - - :param series_id: Series id - :return: pandas dataframe - ''' - - setSchema(self._session_factory.engine) - q = self.read._session.query(TimeSeriesResultValues) - if series_id: - q = q.filter_by(ResultID=series_id) - q = q.order_by(TimeSeriesResultValues.ValueDateTime) - query = q.statement.compile(dialect=self._session_factory.engine.dialect) - data = pd.read_sql_query(sql=query, - con=self._session_factory.engine, - params=query.params) - data.set_index(data['valuedatetime'], inplace=True) - return data - # Series Catalog methods - def get_series_by_site(self , site_id): + def get_series_by_site(self, site_id): # try: # selectedSeries = 
self._edit_session.query(Series).filter_by(site_id=site_id).order_by(Series.id).all() # return selectedSeries @@ -152,17 +127,14 @@ def get_series_by_site(self , site_id): # return self.read.getResults(type="site", ids= [site_id])[0] return self.read.getResults(ids=[site_id]) - - # Site methods def get_all_sites(self): """ :return: List[Sites] """ - #return self._edit_session.query(Site).order_by(Site.code).all() + # return self._edit_session.query(Site).order_by(Site.code).all() return self.read.getResults(type="site") - def get_site_by_id(self, site_id): """ return a Site object that has an id=site_id @@ -174,14 +146,13 @@ def get_site_by_id(self, site_id): # except: # return None - return self.read.getSampling(ids = [site_id])[0] - + return self.read.getSampling(ids=[site_id])[0] def get_all_variables(self): """ :return: List[Variables] """ - #return self._edit_session.query(Variable).all() + # return self._edit_session.query(Variable).all() return self.read.getVariables() def get_variable_by_id(self, variable_id): @@ -193,8 +164,8 @@ def get_variable_by_id(self, variable_id): # return self._edit_session.query(Variable).filter_by(id=variable_id).first() # except: # return None - return self.read.getVariables(ids = [variable_id])[0] -# + return self.read.getVariables(ids=[variable_id])[0] + def get_variable_by_code(self, variable_code): """ @@ -205,10 +176,8 @@ def get_variable_by_code(self, variable_code): # return self._edit_session.query(Variable).filter_by(code=variable_code).first() # except: # return None - return self.read.getVariables(codes = [variable_code])[0] -# + return self.read.getVariables(codes=[variable_code])[0] -# # Unit methods def get_all_units(self): """ @@ -217,7 +186,7 @@ def get_all_units(self): """ # return self._edit_session.query(Unit).all() return self.read.getUnits() -# + def get_unit_by_name(self, unit_name): """ :param unit_name: str @@ -228,7 +197,7 @@ def get_unit_by_name(self, unit_name): # except: # return None return self.read.getUnits(name=[unit_name])[0] -# + def get_unit_by_id(self, unit_id): """ @@ -241,7 +210,6 @@ def get_unit_by_id(self, unit_id): # return None return self.read.getUnits(ids=[unit_id])[0] -# def get_all_qualifiers(self): """ @@ -251,7 +219,7 @@ def get_all_qualifiers(self): # return result ann= self.read.getAnnotations() return ann -# + def get_qualifier_by_code(self, code): """ @@ -259,10 +227,10 @@ def get_qualifier_by_code(self, code): # """ # result = self._edit_session.query(Qualifier).filter(Qualifier.code==code).first() # return result - return self.read.getAnnotations(codes=[code])[0] ##todo: CHECK ON THIS -# + return self.read.getAnnotations(codes=[code])[0] # todo: CHECK ON THIS + def get_qualifiers_by_series_id(self, series_id): - return self.read.getAnnotations(ids=[series_id])[0] ##todo: check on this + return self.read.getAnnotations(ids=[series_id])[0] # todo: check on this def get_all_processing_levels(self): return self.read.getProcessingLevels(ids=None, codes=None) @@ -277,25 +245,24 @@ def get_all_processing_levels(self): # return self._edit_session.query(Qualifier).join(subquery).distinct().all() # - def get_processing_level_by_id(self, qcl_id): try: - return self.read.getProcessingLevels(ids = [qcl_id])[0] - #return self._edit_session.query(QualityControlLevel).filter_by(id=qcl_id).first() + return self.read.getProcessingLevels(ids=[qcl_id])[0] + # return self._edit_session.query(QualityControlLevel).filter_by(id=qcl_id).first() except: return None -# + def get_processing_level_by_code(self, codes): try: 
return self.read.getProcessingLevels(codes=[codes])[0] except: return None -# # Method methods + # Method methods def get_all_methods(self): - #return self._edit_session.query(Method).all() + # return self._edit_session.query(Method).all() return self.read.getMethods() -# + def get_method_by_id(self, method_id): return self.read.getMethods(ids=[method_id])[0] # try: @@ -303,7 +270,7 @@ def get_method_by_id(self, method_id): # except: # result = None # return result -# + def get_method_by_code(self, method_code): try: return self.read.getMethods(codes=[method_code])[0] @@ -316,83 +283,68 @@ def get_method_by_code(self, method_code): # result = None # logger.error("method not found") # return result -# - -#todo: Take another look at this + # todo: Take another look at this # Series Catalog methods def resultExists(self, result): """ - :param site_id: - :param var_id: - :param method_id: - :param source_id: - :param qcl_id: + :param result :return: Series """ # unique Result # FeatureActionID, ResultTypeCV, VariableID, UnitsID, ProcessingLevelID, SampledMediumCV - try: # return self._edit_session.query(Results).filter_by( # VariableID=var_id, MethodID=method_id, # AnnotationID=qcl_id).first() - ret = self._session.query(exists().where(Results.ResultTypeCV == result.ResultTypeCV). - where(Results.VariableID == result.VariableID). - where(Results.UnitsID == result.UnitsID). - where(Results.ProcessingLevelID == result.ProcessingLevelID). - where(Results.SampledMediumCV == result.SampledMediumCV) + setSchema(self._session_factory.engine) + ret = self._session.query(exists().where(Results.ResultTypeCV == result.ResultTypeCV) + .where(Results.VariableID == result.VariableID) + .where(Results.UnitsID == result.UnitsID) + .where(Results.ProcessingLevelID == result.ProcessingLevelID) + .where(Results.SampledMediumCV == result.SampledMediumCV) ) - # where(Results.FeatureActionID == result.FeatureActionID). - - + # where(Results.FeatureActionID == result.FeatureActionID). 
return ret.scalar() + except: return None - + def get_series_by_meta(self, result): + setSchema(self._session_factory.engine) + id = self.read._session.query(Results)\ + .filter_by(ResultTypeCV=result.ResultTypeCV)\ + .filter_by(VariableID=result.VariableID)\ + .filter_by(UnitsID=result.UnitsID)\ + .filter_by(ProcessingLevelID=result.ProcessingLevelID)\ + .filter_by(SampledMediumCV=result.SampledMediumCV) + return id.first() def get_series_from_filter(self): # Pass in probably a Series object, match it against the database pass -# - #Data Value Methods + # Data Value Methods def get_values(self, series_id=None): - ''' - :param series_id: Series id + """ + :param series_id: :return: pandas dataframe - ''' - #series= self.get_series_by_id(series_id) - # if series: - # q = self._edit_session.query(DataValue).filter_by( - # site_id=series.site_id, - # variable_id=series.variable_id, - # method_id=series.method_id, - # source_id=series.source_id, - # quality_control_level_id=series.quality_control_level_id) - # - # query=q.statement.compile(dialect=self._session_factory.engine.dialect) - # data= pd.read_sql_query(sql= query, - # con = self._session_factory.engine, - # params = query.params ) - # #return data.set_index(data['LocalDateTime']) - # return data - # else: - # return None - + """ + # see get_annotations_by_result around line 850 + setSchema(self._session_factory.engine) q = self.read._session.query(TimeSeriesResultValues) if series_id: - q=q.filter_by(ResultID=series_id) - q= q.order_by(TimeSeriesResultValues.ValueDateTime) + q = q.filter_by(ResultID=series_id) + q = q.order_by(TimeSeriesResultValues.ValueDateTime) query = q.statement.compile(dialect=self._session_factory.engine.dialect) data = pd.read_sql_query(sql=query, con=self._session_factory.engine, params=query.params) data.set_index(data['valuedatetime'], inplace=True) + return data def get_all_values_df(self): @@ -404,34 +356,21 @@ def get_all_values_df(self): q = self.read._session.query(TimeSeriesResultValues).order_by(TimeSeriesResultValues.ValueDateTime) query = q.statement.compile(dialect=self._session_factory.engine.dialect) data = pd.read_sql_query(sql=query, con=self._session_factory.engine, - params=query.params) - #columns = list(data) - - # columns.insert(0, columns.pop(columns.index("DataValue"))) - # columns.insert(1, columns.pop(columns.index("ValueDateTime"))) - #columns.insert(2, columns.pop(columns.index("QualifierID"))) + params=query.params) - #data = data.ix[:, columns] return data.set_index(data['ValueDateTime']) - # q = self._edit_session.query(TimeSeriesResultValues).order_by(TimeSeriesResultValues.ValueDateTime) - # query = q.statement.compile(dialect = self._session_factory.engine.dialect) - # data = pd.read_sql_query(sql= query, - # con= self._session_factory.engine, - # params=query.params) -# - def get_all_values_list(self): """ :return: """ - result =self.read._session.query(TimeSeriesResultValues).order_by(TimeSeriesResultValues.ValueDateTime).all() + result = self.read._session.query(TimeSeriesResultValues).order_by(TimeSeriesResultValues.ValueDateTime).all() return [x.list_repr() for x in result] def get_all_values(self): return self.read._session.query(TimeSeriesResultValues).order_by(TimeSeriesResultValues.ValueDateTime).all() -# + @staticmethod def calcSeason(row): @@ -464,103 +403,96 @@ def get_plot_values(self, seriesID, noDataValue, startDate=None, endDate=None): data["month"] = data['valuedatetime'].apply(lambda x: x.month) data["year"] = data['valuedatetime'].apply(lambda x: x.year) 
data["season"] = data.apply(self.calcSeason, axis=1) - # data.set_index(data['valuedatetime'], inplace=True) return data def get_all_plot_values(self): setSchema(self._session_factory.engine) Values = self.get_values() data = Values[['datavalue', 'censorcodecv', 'valuedatetime']] - # data = data[data['datavalue'] != noDataValue] - data["month"] = data['valuedatetime'].apply(lambda x: x.month) data["year"] = data['valuedatetime'].apply(lambda x: x.year) data["season"] = data.apply(self.calcSeason, axis=1) - #data.set_index(data['valuedatetime'], inplace=True) return data - - - - -# def get_data_value_by_id(self, id): -# """ -# -# :param id: -# :return: -# """ -# try: -# return self._edit_session.query(DataValue).filter_by(id=id).first() -# except: -# return None -# -# -# - ##################### # # Update functions # ##################### -# def update_series(self, series): -# """ -# -# :param series: -# :return: -# """ -# merged_series = self._edit_session.merge(series) -# self._edit_session.add(merged_series) -# self._edit_session.commit() -# -# def update_dvs(self, dv_list): -# """ -# -# :param dv_list: -# :return: -# """ -# merged_dv_list = map(self._edit_session.merge, dv_list) -# self._edit_session.add_all(merged_dv_list) -# self._edit_session.commit() -# + def update_result(self, result): + # self.update.updateResult(result.ResultID, result.ValueCount) + self.update.updateResult(result=result) + + + def update_action(self, action): + self.update.updateAction(action=action) ##################### # # Create functions # ##################### + # new series + def create_result(self, var, proc, feature_action, aggcv, itsp, itspunit, status, type, units, medium): + new_result = TimeSeriesResults() + time, offset = self.get_current_time_and_utcoffset() + new_result.ResultDateTime = time + new_result.ResultDateTimeUTCOffset = offset -#new series - def createResult(self, var, meth, proc): - #also create an action - #copy old - #change var, meth proc, in df #intend ts, agg stat - # Result = None - # result = Results() - result = Results() + # create TimeSeriesResult - this should also contain all of the stuff for the Result + new_result.ValueCount = 0 + new_result.FeatureActionID = feature_action + new_result.ResultDateTime = time + new_result.ResultDateTimeUTCOffset = offset + new_result.VariableID = var + new_result.ProcessingLevelID = proc + new_result.AggregationStatisticCV = aggcv + new_result.IntendedTimeSpacingUnitsID = itspunit + new_result.IntendedTimeSpacing = itsp + new_result.StatusCV = status + new_result.ResultTypeCV = type + new_result.UnitsID = units + new_result.SampledMediumCV = medium - if isinstance(var, Variables): - result.VariableID = var.VariableID - result.VariableObj = var - if isinstance(meth, Methods): - # do something with meth - pass + self.create.createResult(result=new_result) + self._session.refresh(new_result) + return new_result - if isinstance(proc, ProcessingLevels): - result.ProcessingLevelID = proc.ProcessingLevelID - result.ProcessingLevelObj = proc - time, offset = self.get_current_time_and_utcoffset() - result.ResultDateTime = time - result.ResultDateTimeUTCOffset = offset + def create_action(self, methodid, description, filelink, begindate, utc, actionby): + new_action = Actions() + new_action.MethodID= methodid + new_action.ActionDescription = description + new_action.ActionFileLink = filelink + new_action.BeginDateTime = begindate + new_action.BeginDateTimeUTCOffset = utc + new_action.EndDateTime = None + new_action.EndDateTimeUTCOffset = None + 
new_action.ActionTypeCV = "Derivation" + + self.create.createAction(new_action) + action_by = new_action + action_by.ActionID = new_action.ActionID + action_by.IsActionLead = True + + self.create.createActionby(action_by) + + return new_action, actionby - return self.create.createResult(result=result) + def createFeatureAction(self, sfid, actionid): + feature_action = FeatureActions() + feature_action.SamplingFeatureID = sfid + feature_action.ActionID = actionid + self.create.createFeatureAction(feature_action) + + return feature_action def get_current_time_and_utcoffset(self): current_time = datetime.datetime.now() @@ -571,101 +503,31 @@ def get_current_time_and_utcoffset(self): return current_time, offset_in_hours - - - - -# def save_series(self, series, dvs): -# """ Save to an Existing Series -# :param series: -# :param data_values: -# :return: -# """ -# -# if self.series_exists(series): -# -# try: -# self._edit_session.add(series) -# self._edit_session.commit() -# self.save_values(dvs) -# except Exception as e: -# self._edit_session.rollback() -# raise e -# logger.info("Existing File was overwritten with new information") -# return True -# else: -# logger.debug("There wasn't an existing file to overwrite, please select 'Save As' first") -# # there wasn't an existing file to overwrite -# raise Exception("Series does not exist, unable to save. Please select 'Save As'") -# -# - # def save_new_series(self, series, dvs): - # """ Create as a new catalog entry - # :param series: - # :param data_values: - # :return: - # """ - # # Save As case - # if self.series_exists(series): - # msg = "There is already an existing file with this information. Please select 'Save' or 'Save Existing' to overwrite" - # logger.info(msg) - # raise Exception(msg) - # else: - # try: - # self._edit_session.add(series) - # self._edit_session.commit() - # self.save_values(dvs) - # #self._edit_session.add_all(dvs) - # except Exception as e: - # self._edit_session.rollback() - # raise e - # - # logger.info("A new series was added to the database, series id: "+str(series.id)) - # return True - - - - - - - def insert_annotations(self, annotations): - annotations.to_sql(name="timeseriesresultvalueannotations", if_exists='append', con=self._session_factory.engine, index=False) - - - def _get_df_query(self, values): - - resid = str(values['resultid'][0]) - sd = values['valuedatetime'].min() - ed = values['valuedatetime'].max() - q = self.read._session.query(TimeSeriesResultValues).\ - filter(TimeSeriesResultValues.ResultID == resid)#.\ - #filter(TimeSeriesResultValues.ValueDateTime.between(sd, ed)) - return q.statement.compile(dialect=self._session_factory.engine.dialect) - - def upsert_values(self, values): setSchema(self._session_factory.engine) query = self._get_df_query(values) - newvals= get_insert(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine) + newvals= self.get_insert(df=values, query=query, dup_cols=["valuedatetime", "resultid"], + engine=self._session_factory.engine) if not newvals.empty: self.insert_values(newvals) - delvals = get_delete(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine) + delvals = self.get_delete(df= values, query = query, dup_cols=["valuedatetime", "resultid"], + engine=self._session_factory.engine) if not delvals.empty: self.delete_dvs(delvals["valuedatetime"].tolist()) - upvals = get_update(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = 
@@ -571,101 +503,31 @@ def get_current_time_and_utcoffset(self):
         return current_time, offset_in_hours
 
-
-
-
-
-# def save_series(self, series, dvs):
-#     """ Save to an Existing Series
-#     :param series:
-#     :param data_values:
-#     :return:
-#     """
-#
-#     if self.series_exists(series):
-#
-#         try:
-#             self._edit_session.add(series)
-#             self._edit_session.commit()
-#             self.save_values(dvs)
-#         except Exception as e:
-#             self._edit_session.rollback()
-#             raise e
-#         logger.info("Existing File was overwritten with new information")
-#         return True
-#     else:
-#         logger.debug("There wasn't an existing file to overwrite, please select 'Save As' first")
-#         # there wasn't an existing file to overwrite
-#         raise Exception("Series does not exist, unable to save. Please select 'Save As'")
-#
-#
-    # def save_new_series(self, series, dvs):
-    #     """ Create as a new catalog entry
-    #     :param series:
-    #     :param data_values:
-    #     :return:
-    #     """
-    #     # Save As case
-    #     if self.series_exists(series):
-    #         msg = "There is already an existing file with this information. Please select 'Save' or 'Save Existing' to overwrite"
-    #         logger.info(msg)
-    #         raise Exception(msg)
-    #     else:
-    #         try:
-    #             self._edit_session.add(series)
-    #             self._edit_session.commit()
-    #             self.save_values(dvs)
-    #             #self._edit_session.add_all(dvs)
-    #         except Exception as e:
-    #             self._edit_session.rollback()
-    #             raise e
-    #
-    #     logger.info("A new series was added to the database, series id: "+str(series.id))
-    #     return True
-
-
-
-
-
-    def insert_annotations(self, annotations):
-        annotations.to_sql(name="timeseriesresultvalueannotations", if_exists='append', con=self._session_factory.engine, index=False)
-
-
-    def _get_df_query(self, values):
-
-        resid = str(values['resultid'][0])
-        sd = values['valuedatetime'].min()
-        ed = values['valuedatetime'].max()
-        q = self.read._session.query(TimeSeriesResultValues).\
-            filter(TimeSeriesResultValues.ResultID == resid)#.\
-            #filter(TimeSeriesResultValues.ValueDateTime.between(sd, ed))
-        return q.statement.compile(dialect=self._session_factory.engine.dialect)
-
 
     def upsert_values(self, values):
         setSchema(self._session_factory.engine)
         query = self._get_df_query(values)
-        newvals= get_insert(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
+        newvals = self.get_insert(df=values, query=query, dup_cols=["valuedatetime", "resultid"],
+                                  engine=self._session_factory.engine)
         if not newvals.empty:
             self.insert_values(newvals)
-        delvals = get_delete(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
+        delvals = self.get_delete(df=values, query=query, dup_cols=["valuedatetime", "resultid"],
+                                  engine=self._session_factory.engine)
         if not delvals.empty:
             self.delete_dvs(delvals["valuedatetime"].tolist())
-        upvals = get_update(df = values, query = query, dup_cols = ["valuedatetime", "resultid"], engine = self._session_factory.engine)
+        upvals = self.get_update(df=values, query=query, dup_cols=["valuedatetime", "resultid"],
+                                 engine=self._session_factory.engine)
         if not upvals.empty:
             self.update_values(upvals)
-
         self._session.commit()
 
     def insert_values(self, values):
         """
-        :param values: pandas dataframe
         :return:
         """
-        values.to_sql(name="TimeSeriesResultValues",
+        setSchema(self._session_factory.engine)
+        values.to_sql(name=TimeSeriesResultValues.__tablename__,
                       schema=TimeSeriesResultValues.__table_args__['schema'],
                       if_exists='append',
                       chunksize=1000,
@@ -687,52 +549,13 @@ def update_values(self, updates):
         # update_list = {'value':updates["datavalue"].tolist(), 'id':updates.index.to_pydatetime().tolist()}
         vals = self.create._session.execute(stmt, update_list)
 
-
-# def create_new_series(self, data_values, site_id, variable_id, method_id, source_id, qcl_id):
-#     """
-#
-#     :param data_values:
-#     :param site_id:
-#     :param variable_id:
-#     :param method_id:
-#     :param source_id:
-#     :param qcl_id:
-#     :return:
-#     """
-#     self.update_dvs(data_values)
-#     series = Series()
-#     series.site_id = site_id
-#     series.variable_id = variable_id
-#     series.method_id = method_id
-#     series.source_id = source_id
-#     series.quality_control_level_id = qcl_id
-#
-#     self._edit_session.add(series)
-#     self._edit_session.commit()
-#     return series
-
-
-    def create_new_series(self, data_values, site_id, variable_id, method_id, source_id, qcl_id):
-        # ToDo: create a Result, TimeSeriesResult and an Action object of type derivation
-        """
-        series_service -> Result in ODM2
-        :param data_values:
-        :param site_id:
-        :param variable_id:
-        :param method_id:
-        :param source_id:
-        :param qcl_id:
-        :return:
-        """
-        self.update_dvs(data_values)
-        series = Results()
-        series.site_id = site_id
-        series.variable_id = variable_id
-        series.method_id = method_id
-        series.source_id = source_id
-        series.quality_control_level_id = qcl_id
-
-        return self.create_service.getResult(series)
+    def _get_df_query(self, values):
+        resid = str(values['resultid'][0])
+        # min/max are currently unused; the ValueDateTime range filter was dropped
+        startdate = values['valuedatetime'].min()
+        ed = values['valuedatetime'].max()
+        q = self.read._session.query(TimeSeriesResultValues)\
+            .filter(TimeSeriesResultValues.ResultID == resid)
+        return q.statement.compile(dialect=self._session_factory.engine.dialect)
 
     def create_method(self, description, link):
         """
@@ -788,6 +611,7 @@ def create_annotation(self, code, text, link=None):
         """
         :param code:
         :param text:
+        :param link:
         :return:
         """
         annotation = Annotations()
@@ -804,9 +628,9 @@ def create_annotation(self, code, text, link=None):
         return self.create_annotation_by_anno(annotation)
 
     def add_annotations(self, anno_list):
+        setSchema(self._session_factory.engine)
         try:
-            #tablename = TimeSeriesResultValueAnnotations.__tablename__
-            #print ("I am TS saving name the table name", tablename)
+
             anno_list.to_sql(name="TimeSeriesResultValueAnnotations",
                              schema=TimeSeriesResultValueAnnotations.__table_args__['schema'],
                              if_exists='append',
@@ -870,28 +694,36 @@ def get_quality_code(self):
         return self.read.getCVs(type="Quality Code")
 
     def get_annotation_by_code(self, code):
-        return self.read.getAnnotations(codes=[code])[0]
+        try:
+            return self.read.getAnnotations(codes=[code])[0]
+        except (IndexError, TypeError):
+            # no annotation with this code
+            return None
 
     def get_annotation_by_id(self, id):
-        return self.read.getAnnotations(ids=[id])[0]
+        try:
+            return self.read.getAnnotations(ids=[id])[0]
+        except (IndexError, TypeError):
+            # no annotation with this id
+            return None
 
     def get_all_annotations(self):
-        return self.read.getAnnotations(type=None)
+        try:
+            return self.read.getAnnotations(type=None)
+        except Exception:
+            return None
 
     def get_annotations_by_result(self, resultid):
+        resultid = int(resultid)
         setSchema(self._session_factory.engine)
-        # ids = [x[0] for x in self.read._session.query(TimeSeriesResultValues.ValueID)\
-        #     .filter(TimeSeriesResultValues.ResultID == resultid).all()]
-        # q = self.read._session.query(TimeSeriesResultValueAnnotations)\
-        #     .filter(TimeSeriesResultValueAnnotations.ValueID.in_(ids)).all()
-
-        q =self.read._session.query(TimeSeriesResultValueAnnotations.AnnotationID, TimeSeriesResultValueAnnotations.ValueID,
-                                    TimeSeriesResultValues.ResultID, TimeSeriesResultValues.ValueDateTime)\
+        q = self.read._session.query(TimeSeriesResultValueAnnotations.AnnotationID, TimeSeriesResultValueAnnotations.ValueID,
+                                     TimeSeriesResultValues.ResultID, TimeSeriesResultValues.ValueDateTime, Annotations.AnnotationCode)\
             .filter(TimeSeriesResultValues.ResultID == resultid)\
-            .filter(TimeSeriesResultValueAnnotations.ValueID == TimeSeriesResultValues.ValueID)
+            .filter(TimeSeriesResultValueAnnotations.ValueID == TimeSeriesResultValues.ValueID)\
+            .filter(Annotations.AnnotationID == TimeSeriesResultValueAnnotations.AnnotationID)
         query = q.statement.compile(dialect=self._session_factory.engine.dialect)
         data = pd.read_sql_query(sql=query,
                                  con=self._session_factory.engine,
-                                 params=query.params)
+                                 params=query.params)
         return data
 
     def get_aggregation_statistic(self):
@@ -925,38 +757,21 @@ def get_all_affiliations(self):
 #             raise e
 #
 #
-    def delete_values_by_series(self, series, startdate=None):
+    def delete_values_by_series(self, seriesid, startdate=None):
         """
-        :param series:
+        :param seriesid:
+        :param startdate: first day to delete from; values are removed through the end of the series
         :return:
         """
-        # todo stephanie: add startdate stuff
+
         try:
-            self.delete.deleteTSRValues(ids=[series.id])
+            return self.delete.deleteTSRValues(ids=[seriesid], startdate=startdate)
         except Exception as ex:
             message = "Values were not successfully deleted: %s" % ex
             print message
             logger.error(message)
             raise ex
-        # try:
-        #     q= self._edit_session.query(DataValue).filter_by(site_id = series.site_id,
-        #                                                      variable_id = series.variable_id,
-        #                                                      method_id = series.method_id,
-        #                                                      source_id = series.source_id,
-        #                                                      quality_control_level_id = series.quality_control_level_id)
-        #     if startdate is not None:
-        #         #start date indicates what day you should start deleting values. the values will delete to the end of the series
-        #         return q.filter(DataValue.local_date_time >= startdate).delete()
-        #     else:
-        #         return q.delete()
-        #
-        # except Exception as ex:
-        #     message = "Values were not successfully deleted: %s" % ex
-        #     print message
-        #     logger.error(message)
-        #     raise ex
-        #
+
 
     def delete_dvs(self, id_list):
         """
@@ -981,3 +796,44 @@ def get_values_by_series(self, series_id):
         q = q.order_by(TimeSeriesResultValues.ValueDateTime)
 
         return q.all()
+
+    def get_delete(self, df, engine, query, dup_cols=[]):
+        """
+        Return the rows that exist in the database query but no longer appear
+        in df; these are the values to delete.
+        """
+        df.drop_duplicates(dup_cols, keep='last', inplace=True)
+        newdf = pd.merge(df, pd.read_sql(query, engine), how='right', on=dup_cols, indicator=True)
+        newdf = newdf[newdf['_merge'] == 'right_only']
+        newdf.drop(['_merge'], axis=1, inplace=True)
+        # right_only rows exist only in the database, so they must come from
+        # the merge result itself; df cannot contain them
+        return newdf
+
+    def get_update(self, df, engine, query, dup_cols=[]):
+        """
+        Return the rows of df that already exist in the database but carry a
+        different data value; these are the values to update.
+        """
+        df.drop_duplicates(dup_cols, keep='last', inplace=True)
+        newdf = pd.merge(df, pd.read_sql(query, engine), how='inner', on=dup_cols, indicator=True)
+        newdf.drop(['_merge'], axis=1, inplace=True)
+        changed = newdf[newdf['datavalue_x'] != newdf['datavalue_y']]
+        return df[df['valuedatetime'].isin(changed['valuedatetime'])]
+
+    def get_insert(self, df, engine, query, dup_cols=[]):
+        """
+        Remove rows from a dataframe that already exist in the database.
+        Required:
+            df: dataframe to remove duplicate rows from
+            engine: SQLAlchemy engine object
+            query: compiled query selecting the existing rows to compare against
+            dup_cols: list or tuple of column names to check for duplicate row values
+        Returns:
+            the rows of df that are not yet present in the database table
+        """
+        df.drop_duplicates(dup_cols, keep='last', inplace=True)
+        newdf = pd.merge(df, pd.read_sql(query, engine), how='left', on=dup_cols, indicator=True)
+        newdf = newdf[newdf['_merge'] == 'left_only']
+        newdf.drop(['_merge'], axis=1, inplace=True)
+        return df[df['valuedatetime'].isin(newdf['valuedatetime'])]
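The three get_insert/get_delete/get_update helpers above all lean on the same
pandas idiom: merge the incoming frame against the database frame with
indicator=True, then partition rows on the '_merge' label. A runnable sketch
of that pattern on toy frames (illustrative columns, not the ODM2 tables):

    import pandas as pd

    db = pd.DataFrame({'valuedatetime': [1, 2, 3], 'datavalue': [10.0, 20.0, 30.0]})  # rows already stored
    df = pd.DataFrame({'valuedatetime': [2, 3, 4], 'datavalue': [20.0, 35.0, 40.0]})  # rows from the editor

    merged = pd.merge(df, db, on=['valuedatetime'], how='outer', indicator=True)

    inserts = merged[merged['_merge'] == 'left_only']    # new rows      -> insert_values
    deletes = merged[merged['_merge'] == 'right_only']   # vanished rows -> delete_dvs
    both = merged[merged['_merge'] == 'both']
    updates = both[both['datavalue_x'] != both['datavalue_y']]  # changed rows -> update_values

    print(inserts['valuedatetime'].tolist())   # [4]
    print(deletes['valuedatetime'].tolist())   # [1]
    print(updates['valuedatetime'].tolist())   # [3]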
diff --git a/odmtools/odmservices/service_manager.py b/odmtools/odmservices/service_manager.py
index 94f331d..d32b767 100755
--- a/odmtools/odmservices/service_manager.py
+++ b/odmtools/odmservices/service_manager.py
@@ -29,6 +29,7 @@ def __init__(self, debug=False, conn_dict=None):
         self.debug = debug
         f = self._get_file('r')
         self._conn_dicts = []
+        #self.version = 0
 
         self._connection_format = "%s+%s://%s:%s@%s/%s"
 
@@ -129,6 +130,14 @@ def delete_connection(self, conn_dict):
         self._conn_dicts[:] = [x for x in self._conn_dicts if x != conn_dict]
 
     def get_series_service(self, conn_dict=None, conn_string=""):
+        # TODO: recheck the cached service if the connection settings change
+        if getattr(self, 'series_service', None) is not None:
+            return self.series_service
+        self.series_service = self._create_series_service(conn_dict, conn_string)
+        return self.series_service
+
+    def _create_series_service(self, conn_dict=None, conn_string=""):
         if not conn_dict:
             conn_dict = self.get_current_conn_dict()
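The reworked get_series_service() is a memoized accessor: build the service
once, hand back the cached instance afterwards. (The originally proposed
"'series_service' in locals()" test could never see an instance attribute,
which is why the version above uses getattr.) A runnable sketch of the
pattern with a stand-in class, not the real ServiceManager:

    class ServiceManagerSketch(object):
        def get_series_service(self):
            # return the cached service if one was already built
            cached = getattr(self, '_series_service', None)
            if cached is None:
                cached = self._create_series_service()
                self._series_service = cached
            return cached

        def _create_series_service(self):
            return object()  # stand-in for the real SeriesService

    mgr = ServiceManagerSketch()
    assert mgr.get_series_service() is mgr.get_series_service()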
diff --git a/odmtools/view/clsDBConfig.py b/odmtools/view/clsDBConfig.py
index 3e6231b..8c3d0e8 100644
--- a/odmtools/view/clsDBConfig.py
+++ b/odmtools/view/clsDBConfig.py
@@ -172,13 +172,14 @@ def __init__(self, parent):
         self.stVersion.Wrap(-1)
         connectionSizer.Add(self.stVersion, 0, wx.ALL | wx.ALIGN_RIGHT | wx.EXPAND, 5)
 
-        version_choices = [ u"1.1", u"2.0"]
-        self.cbVersion = wx.ComboBox(self, wx.ID_ANY, u"1.1", wx.DefaultPosition, wx.DefaultSize,
+        # version_choices = [ u"1.1", u"2.0"]
+        version_choices = [u"2.0"]
+        self.cbVersion = wx.ComboBox(self, wx.ID_ANY, u"2.0", wx.DefaultPosition, wx.DefaultSize,
                                      version_choices, wx.CB_READONLY )#| wx.CB_SORT)
         self.cbVersion.SetSelection(0)
         connectionSizer.Add(self.cbVersion, 1, wx.ALL | wx.EXPAND, 5)
 
-        self.stConnType = wx.StaticText(self, wx.ID_ANY, u"Connection Type:", wx.DefaultPosition, wx.DefaultSize,
+        self.stConnType = wx.StaticText(self, wx.ID_ANY, u"ODM Version:", wx.DefaultPosition, wx.DefaultSize,
                                         wx.ALIGN_RIGHT)
         self.stConnType.Wrap(-1)
         connectionSizer.Add(self.stConnType, 0, wx.ALL | wx.EXPAND | wx.ALIGN_RIGHT, 5)
diff --git a/setup/Mac/ODMTools.spec b/setup/Mac/ODMTools.spec
index c009faf..0c4c2dc 100644
--- a/setup/Mac/ODMTools.spec
+++ b/setup/Mac/ODMTools.spec
@@ -5,33 +5,29 @@ block_cipher = None
 
 a = Analysis(['/Users/stephanie/DEV/ODMToolsPython/ODMTools.py'],
              pathex=['/Users/stephanie/DEV/ODMToolsPython/setup/Mac'],
-             binaries=None,
-             datas=None,
              hiddenimports=[],
-             hookspath=['/Users/stephanie/DEV/ODMToolsPython/setup/hooks'],
+             hookspath=None,
              runtime_hooks=None,
-             excludes=['PyQt4', 'PyQt4.QtCore', 'PyQt4.QtGui'],
-             win_no_prefer_redirects=False,
-             win_private_assemblies=False,
+             excludes=None,
             cipher=block_cipher)
-pyz = PYZ(a.pure, a.zipped_data,
+pyz = PYZ(a.pure,
          cipher=block_cipher)
exe = EXE(pyz,
          a.scripts,
          exclude_binaries=True,
          name='ODMTools',
          debug=False,
-          strip=False,
+          strip=None,
          upx=True,
-          console=False , version='/Users/stephanie/DEV/ODMToolsPython/setup/version.txt', icon='/Users/stephanie/DEV/ODMToolsPython/odmtools/common/icons/ODMTools.icns')
+          console=False , version='/Users/stephanie/DEV/ODMToolsPython/setup/version.txt', icon='odmtools/common/icons/ODMTools.icns')
coll = COLLECT(exe,
               a.binaries,
               a.zipfiles,
               a.datas,
-               strip=False,
-               upx=False,
+               strip=None,
+               upx=True,
               name='ODMTools')
app = BUNDLE(coll,
             name='ODMTools.app',
-             icon='/Users/stephanie/DEV/ODMToolsPython/odmtools/common/icons/ODMTools.icns',
+             icon='odmtools/common/icons/ODMTools.icns',
             bundle_identifier=None)
+echo "cleanup" if [ -d build ] && [ -d dist ]; then echo "Cleaning up old build and dist files" - rm -ir build dist + rm -r --interactive=once build dist fi - -sudo python setup.py py2app +echo "activate environment" +source activate odmtools +echo "run py2app" +sudo python ../setuptest.py py2app #sudo /usr/local/Cellar/python/2.7.8/bin/python setup.py py2app diff --git a/setup/Windows/ODMTools.spec b/setup/Windows/ODMTools.spec index b375c2e..cf6bc60 100644 --- a/setup/Windows/ODMTools.spec +++ b/setup/Windows/ODMTools.spec @@ -1,44 +1,10 @@ - - -a = Analysis(['D:\\DEV\\ODMTools\\ODMTools.py'], - pathex=['D:\\DEV\\ODMTools\\setup\\Windows'], - binaries=None, - datas=None, - hiddenimports=[], - hookspath=['../hooks'], - runtime_hooks=None, - excludes=['PyQt4', 'PyQt4.QtCore', 'PyQt4.QtGui'], - win_no_prefer_redirects=False, - win_private_assemblies=False, - cipher=block_cipher) -pyz = PYZ(a.pure, a.zipped_data, - cipher=block_cipher) -exe = EXE(pyz, - a.scripts, - exclude_binaries=True, - name='ODMTools', - debug=False, - strip=False, - upx=False, - console=True , version='D:\\DEV\\ODMTools\\setup\\version.txt', icon='D:\\DEV\\ODMTools\odmtools\\common\\icons\\ODMTools.ico') -coll = COLLECT(exe, - a.binaries, - a.zipfiles, - a.datas, - strip=False, - upx=True, - name='ODM2Tools') - - - - # -*- mode: python -*- block_cipher = None -a = Analysis(['ODMTools.py'], - pathex=['D:\\DEV\\ODMTools'], +a = Analysis(['D:\\DEV\\ODMTools\\ODMTools.py'], + pathex=['D:\\DEV\\ODMTools\\setup\\Windows'], binaries=None, datas=None, hiddenimports=[], @@ -56,12 +22,12 @@ exe = EXE(pyz, name='ODMTools', debug=False, strip=False, - upx=True, - console=True ) + upx=False, + console=True , version='D:\\DEV\\ODMTools\\setup\\version.txt', icon='D:\\DEV\\ODMTools\\odmtools\\common\\icons\\ODMTools.ico') coll = COLLECT(exe, a.binaries, a.zipfiles, a.datas, strip=False, - upx=True, + upx=False, name='ODMTools') diff --git a/setup/make.py b/setup/make.py index b4ccbf6..e5752c0 100644 --- a/setup/make.py +++ b/setup/make.py @@ -16,7 +16,7 @@ ## Update odmtools.meta.data whenever creating a release from odmtools.meta import data -BASE_DIR = os.path.dirname(os.path.realpath(__file__)) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) SETUP_DIR = os.path.join(BASE_DIR, 'setup') WIN_DIR = os.path.join(SETUP_DIR, "Windows") MAC_DIR = os.path.join(SETUP_DIR, "Mac") @@ -28,11 +28,11 @@ MAC_WORK_DIR = os.path.join(MAC_DIR, "Temp") WORK_DIR = os.path.join(WIN_DIR, "Temp") -ICON_DIR = os.path.join('odmtools', 'common', "icons") +ICON_DIR = os.path.join(BASE_DIR, 'odmtools', 'common', "icons") WIN_ICON_FILE = os.path.join(ICON_DIR, "ODMTools.ico") MAC_ICON_FILE = os.path.join(ICON_DIR, "ODMTools.icns") -APP_DIR = os.path.join(MAC_DIR, 'Dist', "ODMTools.app") +APP_DIR = os.path.join(MAC_DIR, 'Dist', "ODM2Tools.app") # Location of Windows files APP_FILE = os.path.join(BASE_DIR, "ODMTools.py") MAKE_FILE = os.path.realpath(__file__) @@ -94,7 +94,7 @@ def zipdir(basedir, archivename): z.write(absfn, zfn) def printInfo(): print "=============================================================" - print "= ODMTools Installer " + print "= ODM2Tools Installer " print "= Be sure to update odmtools/meta/data with every release " print "= Building release: {version}".format(version=data.version), print "\n= Platform: {platform}, {architecture}".format(platform=sys.platform, architecture=platform.architecture()), "\n=" diff --git a/setup/setup.py b/setup/setup.py index d61332f..d872c03 100644 --- a/setup/setup.py +++ 
diff --git a/setup/setup.py b/setup/setup.py
index d61332f..d872c03 100644
--- a/setup/setup.py
+++ b/setup/setup.py
@@ -11,8 +11,23 @@
     python setup.py py2exe
 """
 
+import macholib
+#print("~"*60 + "macholib version: " + macholib.__version__)
+if macholib.__version__ <= "1.7":
+    print("Applying macholib patch...")
+    import macholib.dyld
+    import macholib.MachOGraph
+    dyld_find_1_7 = macholib.dyld.dyld_find
+    def dyld_find(name, loader=None, **kwargs):
+        #print("~"*60 + "calling alternate dyld_find")
+        if loader is not None:
+            kwargs['loader_path'] = loader
+        return dyld_find_1_7(name, **kwargs)
+    macholib.MachOGraph.dyld_find = dyld_find
+
 import sys
+import os
 
 '''
 from setuptools import setup
@@ -23,31 +38,43 @@
 
-NAME = 'ODMTools'
+NAME = 'ODM2Tools'
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+ICON_DIR = os.path.join(BASE_DIR, 'odmtools', 'common', "icons")
+WIN_ICON_FILE = os.path.join(ICON_DIR, "ODMTools.ico")
+MAC_ICON_FILE = os.path.join(ICON_DIR, "ODMTools.icns")
+#APP = ['/Users/stephanie/DEV/ODMToolsPython/ODMTools.py']
+APP = os.path.join(BASE_DIR, 'ODMTools.py')
 
 extra_options = None
 sys.setrecursionlimit(2000)
 if sys.platform == 'darwin':
     sys.argv.append('py2app')
     from setuptools import setup
-    APP = ['/Users/stephanie/DEV/ODMToolsPython/ODMTools.py']
-    LIBS = ['/usr/X11/lib/libfreetype.6.dylib', '/usr/X11/lib/libstdc++.6.dylib', '/usr/X11/lib/libpng15.15.dylib']
-    OPTIONS = {'iconfile': '/Users/stephanie/DEV/ODMToolsPython/odmtools/common/icons/ODMTools.icns',
-               'includes': ['pymysql', 'sqlalchemy', 'dateutil'], 'frameworks': LIBS}
+    # APP = ['/Users/stephanie/DEV/ODMToolsPython/ODMTools.py']
+    LIBS = ['/usr/X11/lib/libfreetype.6.dylib', '/usr/X11/lib/libstdc++.6.dylib', '/usr/X11/lib/libpng15.15.dylib', '/anaconda/lib/libwx_osx_cocoau-3.0.0.0.0.dylib']
+    OPTIONS = {'iconfile': MAC_ICON_FILE,
+               'includes': ['pymysql', 'sqlalchemy', 'dateutil'],
+               'frameworks': LIBS}
     extra_options = dict(app=APP, setup_requires=['py2app'], options={'py2app': OPTIONS})
 elif sys.platform == 'win32':
     sys.argv.append('py2exe')
     from distutils.core import setup
-    APP = ['C:\Users\Jacob\Documents\ODMToolsPython\ODMTools.py']
+    APP = ['D:\Dev\ODMTools\ODMTools.py']
     import numpy
     import py2exe
     from glob import glob
     data_files = [
         ("Microsoft.VC90.CRT", glob(r'C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\Microsoft.VC90.CRT\*.*')),
-        (r'mpl-data', [r'C:\Anaconda\envs\odmtools\Lib\site-packages\matplotlib\mpl-data\matplotlibrc']),
-        (r'mpl-data\images', glob(r'C:\Anaconda\envs\odmtools\Lib\site-packages\matplotlib\mpl-data\images\*.*')),
-        (r'mpl-data\fonts', glob(r'C:\Anaconda\envs\odmtools\Lib\site-packages\matplotlib\mpl-data\fonts\*.*'))]
+        (r'mpl-data', [r'C:\Anaconda3\envs\odmtools\Lib\site-packages\matplotlib\mpl-data\matplotlibrc']),
+        (r'mpl-data\images', glob(r'C:\Anaconda3\envs\odmtools_release\Lib\site-packages\matplotlib\mpl-data\images\*.*')),
+        (r'mpl-data\fonts', glob(r'C:\Anaconda3\envs\odmtools_release\Lib\site-packages\matplotlib\mpl-data\fonts\*.*')),
+        (r'mpl-data\stylelib', glob(r'C:\Anaconda3\envs\odmtools_release\Lib\site-packages\matplotlib\mpl-data\stylelib\*.*')),
+        # ('.', glob('*.dll')),
+        # ('.', glob('C:\Windows\system32\OPENGL32.dll'))]
+        ('.', glob(r'C:\Anaconda3\envs\odmtools_release\Library\bin\mkl_p4m.dll')),
+        ('.', glob(r'C:\Anaconda3\envs\odmtools_release\Library\bin\mkl_p4.dll'))]
 
     OPTIONS = {
         #'excludes': ['_ssl', 'pyreadline', 'difflib', 'doctest', 'optparse', 'pickle', 'calendar'],
@@ -57,18 +84,22 @@
                          'MSVCP90.dll', 'WS2_32.dll', 'WINSPOOL.DRV', 'GDI32.dll', 'KERNEL32.dll', 'ntdll.dll',
                          'COMCTL32.dll', 'COMDLG32.dll', 'msvcrt.dll', 'RPCRT4.dll'],
         "optimize": 2,
+        # "includes": ['C:\Windows\system32\OPENGL32.dll',
+        #              'C:\Anaconda3\envs\odmtools_release\Library\bin\mkl_p4.dll',
+        #              'C:\Anaconda3\envs\odmtools_release\Library\bin\mkl_p4m.dll'],
         "bundle_files": 3,
         "dist_dir": "dist",
         "xref": False,
         "skip_archive": False,
         "ascii": False,
         "custom_boot_script": '',
-        "packages": ['wx.lib.pubsub', 'ObjectListView', 'pyodbc'],
+        "packages": ['wx.lib.pubsub', 'pyodbc', 'numpy', 'scipy', 'sqlalchemy', 'wx', 'pandas'],  # 'ObjectListView',
+        # make sure that mkl_p4.dll and mkl_p4m.dll have been copied into the Dist folder
     }
 
     sys.path.append("C:\\Program Files (x86)\\Microsoft Visual Studio 10.0\\VC\\Microsoft.VC90.CRT")
-    sys.path.append("C:\\Users\\Jacob\\Documents\\ODMToolsPython")
+    sys.path.append(BASE_DIR)
 
     extra_options = dict(console=APP, data_files=data_files, options={'py2exe': OPTIONS})
 
 setup(name=NAME, **extra_options)
diff --git a/tests/test_odmservices/test_edit_service.py b/tests/test_odmservices/test_edit_service.py
index 9fea6c4..90b2d83 100644
--- a/tests/test_odmservices/test_edit_service.py
+++ b/tests/test_odmservices/test_edit_service.py
@@ -72,7 +72,7 @@ def test_duplicate_values_filter(self):
     def test_save_series(self):
         stlen = len(self.series.data_values)
         assert self.edit_service.save()
-        val = self.series_service.get_series_by_id(self.series.id)
+        val = self.series_service.get_series(self.series.id)
         assert len(val.data_values)==stlen
 
     def test_save_as_series(self):
@@ -96,7 +96,7 @@ def test_save_append_keep(self):
         svalue = self.series.data_values[0]
         self.edit_service.memDB.updateValue([svalue.local_date_time],'+', 5 )
 
-        news= self.edit_service.memDB.series_service.get_series_by_id(self.series.id)
+        news = self.edit_service.memDB.series_service.get_series(self.series.id)
         result = self.edit_service.save_appending(overwrite = False)
         len2= len(self.series.data_values)
         assert len1 == len2
@@ -108,7 +108,7 @@ def test_save_append_overwrite(self):
         svalue = self.series.data_values[0]
         self.edit_service.memDB.updateValue([svalue.local_date_time],'+', 5)
 
-        news= self.edit_service.memDB.series_service.get_series_by_id(self.series.id)
+        news = self.edit_service.memDB.series_service.get_series(self.series.id)
         result = self.edit_service.save_appending(overwrite = True)
         len2= len(self.series.data_values)
         assert len1 == len2
diff --git a/tests/test_odmservices/test_series_service.py b/tests/test_odmservices/test_series_service.py
index ec16828..e98b551 100644
--- a/tests/test_odmservices/test_series_service.py
+++ b/tests/test_odmservices/test_series_service.py
@@ -183,10 +183,10 @@ def test_get_all_series(self):
         assert series.id == all_series[0].id
 
     def test_get_series_by_id(self):
-        assert self.series_service.get_series_by_id(10) == None
+        assert self.series_service.get_series(10) == None
 
         series = test_util.add_series(self.session)
-        db_series = self.series_service.get_series_by_id(series.id)
+        db_series = self.series_service.get_series(series.id)
         assert series.id == db_series.id
 
@@ -292,7 +292,7 @@ def test_delete_dvs(self):
         subset = dvs[:5]
         self.series_service.delete_dvs([x.local_date_time for x in subset])
         assert self.series_service.get_data_value_by_id(subset[0].id) == None
-        series = self.series_service.get_series_by_id(series.id)  # Reload
+        series = self.series_service.get_series(series.id)  # Reload
         assert len(series.data_values) == 5
 
     def test_update_dvs(self):
@@ -304,7 +304,7 @@ def test_update_dvs(self):
             subset[i].data_value = 100
 
         self.series_service.update_dvs(subset)
-        series = self.series_service.get_series_by_id(series.id)
+        series = self.series_service.get_series(series.id)
         assert series.data_values[0].data_value == 100
 
     def test_create_new_series(self):
@@ -338,7 +338,7 @@ def test_update_series(self):
         self.series_service.update_series(series)
 
-        series = self.series_service.get_series_by_id(series.id)
+        series = self.series_service.get_series(series.id)
         assert series.site_code == "NEW"
         assert series.variable_code == "NEW"
 
@@ -367,16 +367,16 @@ def test_create_qcl(self):
     def test_delete_series(self):
         series = test_util.add_series(self.session)
-        assert self.series_service.get_series_by_id(series.id) != None
+        assert self.series_service.get_series(series.id) != None
 
         self.series_service.delete_series(series)
-        assert self.series_service.get_series_by_id(series.id) == None
+        assert self.series_service.get_series(series.id) == None
 
     def test_delete_values(self):
         series = test_util.add_series(self.session)
-        assert self.series_service.get_series_by_id(series.id) != None
+        assert self.series_service.get_series(series.id) != None
 
         self.series_service.delete_values_by_series(series)
-        val = self.series_service.get_series_by_id(series.id)
+        val = self.series_service.get_series(series.id)
         print val
         assert val != None
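The add_annotations() change earlier in this diff is a thin wrapper over
pandas DataFrame.to_sql. A runnable round-trip sketch against an in-memory
SQLite engine (SQLite has no schemas, so the schema= argument used against
ODM2 is omitted here; the columns are illustrative):

    import pandas as pd
    from sqlalchemy import create_engine

    engine = create_engine('sqlite://')  # stand-in for the ODM2 engine
    annos = pd.DataFrame({'valueid': [1, 2], 'annotationid': [7, 7]})
    annos.to_sql(name='timeseriesresultvalueannotations', con=engine,
                 if_exists='append', index=False, chunksize=1000)
    print(pd.read_sql('select * from timeseriesresultvalueannotations', engine))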