From e311fb7fce2cecc7551b25e875dd9efcab168f9c Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Fri, 1 May 2015 04:38:46 -0600 Subject: [PATCH 1/4] removing conditional db connections --- qiita_db/analysis.py | 14 +++----- qiita_db/base.py | 7 ++-- qiita_db/data.py | 4 +-- .../base_metadata_template.py | 9 ++--- qiita_db/parameters.py | 4 +-- qiita_db/user.py | 6 ++-- qiita_db/util.py | 35 ++++++++++--------- 7 files changed, 38 insertions(+), 41 deletions(-) diff --git a/qiita_db/analysis.py b/qiita_db/analysis.py index 37fb276df..15360485c 100644 --- a/qiita_db/analysis.py +++ b/qiita_db/analysis.py @@ -663,8 +663,7 @@ def build_files(self, rarefaction_depth=None): def _get_samples(self, conn_handler=None): """Retrieves dict of samples to proc_data_id for the analysis""" - conn_handler = conn_handler if conn_handler is not None \ - else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() sql = ("SELECT processed_data_id, array_agg(sample_id ORDER BY " "sample_id) FROM qiita.analysis_sample WHERE analysis_id = %s " "GROUP BY processed_data_id") @@ -701,8 +700,7 @@ def _build_biom_tables(self, samples, rarefaction_depth, new_tables[data_type] = new_tables[data_type].merge(table) # add the new tables to the analysis - conn_handler = conn_handler if conn_handler is not None \ - else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() _, base_fp = get_mountpoint(self._table)[0] for dt, biom_table in viewitems(new_tables): # rarefy, if specified @@ -719,9 +717,7 @@ def _build_biom_tables(self, samples, rarefaction_depth, def _build_mapping_file(self, samples, conn_handler=None): """Builds the combined mapping file for all samples Code modified slightly from qiime.util.MetadataMap.__add__""" - conn_handler = conn_handler if conn_handler is not None \ - else SQLConnectionHandler() - + conn_handler = SQLConnectionHandler() all_sample_ids = set() sql = """SELECT filepath_id, filepath FROM qiita.filepath @@ -791,8 +787,8 @@ def _add_file(self, filename, filetype, data_type=None, conn_handler=None): data_type : str, optional conn_handler : SQLConnectionHandler object, optional """ - conn_handler = conn_handler if conn_handler is not None \ - else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() \ + filetype_id = convert_to_id(filetype, 'filepath_type', conn_handler) _, mp = get_mountpoint('analysis', conn_handler)[0] diff --git a/qiita_db/base.py b/qiita_db/base.py index d2d34a088..3f42e6d11 100644 --- a/qiita_db/base.py +++ b/qiita_db/base.py @@ -132,8 +132,7 @@ def _check_id(self, id_, conn_handler=None): """ self._check_subclass() - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() return conn_handler.execute_fetchone( "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE " @@ -265,8 +264,8 @@ def check_status(self, status, exclude=False, conn_handler=None): self._check_subclass() # Get all available statuses - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() + statuses = [x[0] for x in conn_handler.execute_fetchall( "SELECT DISTINCT status FROM qiita.{0}_status".format(self._table), (self._id, ))] diff --git a/qiita_db/data.py b/qiita_db/data.py index a218a5b9e..007733e41 100644 --- a/qiita_db/data.py +++ b/qiita_db/data.py @@ -147,7 +147,7 @@ def add_filepaths(self, filepaths, conn_handler=None): # Check if the connection handler has been provided. Create a new # one if not. 
- conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() # Update the status of the current object self._set_link_filepaths_status("linking") @@ -478,7 +478,7 @@ def _is_preprocessed(self, conn_handler=None): bool whether the RawData has been preprocessed or not """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() return conn_handler.execute_fetchone( "SELECT EXISTS(SELECT * FROM qiita.prep_template_preprocessed_data" " PTPD JOIN qiita.prep_template PT ON PT.prep_template_id = " diff --git a/qiita_db/metadata_template/base_metadata_template.py b/qiita_db/metadata_template/base_metadata_template.py index a552469e8..1b0d3d6e3 100644 --- a/qiita_db/metadata_template/base_metadata_template.py +++ b/qiita_db/metadata_template/base_metadata_template.py @@ -492,8 +492,9 @@ class MetadataTemplate(QiitaObject): def _check_id(self, id_, conn_handler=None): r"""Checks that the MetadataTemplate id_ exists on the database""" self._check_subclass() - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE " "{1}=%s)".format(self._table, self._id_column), @@ -1032,7 +1033,7 @@ def add_filepath(self, filepath, conn_handler=None, fp_id=None): # Check if the connection handler has been provided. Create a new # one if not. - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() fp_id = self._fp_id if fp_id is None else fp_id try: @@ -1056,7 +1057,7 @@ def get_filepaths(self, conn_handler=None): # Check if the connection handler has been provided. Create a new # one if not. 
- conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() try: filepath_ids = conn_handler.execute_fetchall( diff --git a/qiita_db/parameters.py b/qiita_db/parameters.py index cdc432fa7..43e72eb1c 100644 --- a/qiita_db/parameters.py +++ b/qiita_db/parameters.py @@ -133,8 +133,8 @@ def _check_id(self, id_, conn_handler=None): """ self._check_subclass() - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE {1} = %s)".format( self._table, self._column_id), diff --git a/qiita_db/user.py b/qiita_db/user.py index b78a2a712..ab1a3d05c 100644 --- a/qiita_db/user.py +++ b/qiita_db/user.py @@ -82,8 +82,8 @@ def _check_id(self, id_, conn_handler=None): """ self._check_subclass() - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( "SELECT EXISTS(SELECT * FROM qiita.qiita_user WHERE " "email = %s)", (id_, ))[0] @@ -454,7 +454,7 @@ def _change_pass(self, newpass, conn_handler=None): sql = ("UPDATE qiita.{0} SET password=%s, pass_reset_code=NULL WHERE " "email = %s".format(self._table)) - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() conn_handler.execute(sql, (hash_password(newpass), self._id)) diff --git a/qiita_db/util.py b/qiita_db/util.py index 0f7f37993..53b6a25ee 100644 --- a/qiita_db/util.py +++ b/qiita_db/util.py @@ -341,7 +341,7 @@ def get_table_cols(table, conn_handler=None): list of str The column headers of `table` """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() headers = conn_handler.execute_fetchall( "SELECT column_name FROM information_schema.columns WHERE " "table_name=%s AND table_schema='qiita'", (table, )) @@ -363,7 +363,7 @@ def get_table_cols_w_type(table, conn_handler=None): list of tuples of (str, str) The column headers and data type of `table` """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() return conn_handler.execute_fetchall( "SELECT column_name, data_type FROM information_schema.columns WHERE " "table_name=%s", (table,)) @@ -412,8 +412,8 @@ def get_db_files_base_dir(conn_handler=None): str The path to the base directory of all db files """ - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( "SELECT base_data_dir FROM settings")[0] @@ -426,8 +426,8 @@ def get_work_base_dir(conn_handler=None): str The path to the base directory of all db files """ - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( "SELECT base_work_dir FROM settings")[0] @@ -555,8 +555,8 @@ def get_mountpoint(mount_type, conn_handler=None, retrieve_all=False): list List of tuple, where: [(id_mountpoint, filepath_of_mountpoint)] """ - conn_handler = (conn_handler if conn_handler is not None - else SQLConnectionHandler()) + conn_handler = SQLConnectionHandler() + if retrieve_all: result = conn_handler.execute_fetchall( "SELECT data_directory_id, mountpoint, subdirectory FROM " @@ -586,7 +586,7 @@ def get_mountpoint_path_by_id(mount_id, 
conn_handler=None): str The mountpoint path """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() mountpoint, subdirectory = conn_handler.execute_fetchone( """SELECT mountpoint, subdirectory FROM qiita.data_directory WHERE data_directory_id=%s""", (mount_id,)) @@ -676,7 +676,7 @@ def purge_filepaths(conn_handler=None): conn_handler : SQLConnectionHandler, optional The connection handler object connected to the DB """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() # Get all the (table, column) pairs that reference to the filepath table # Code adapted from http://stackoverflow.com/q/5347050/3746629 @@ -733,7 +733,7 @@ def move_filepaths_to_upload_folder(study_id, filepaths, conn_handler=None): conn_handler : SQLConnectionHandler, optional The connection handler object connected to the DB """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() uploads_fp = join(get_mountpoint("uploads")[0][1], str(study_id)) # We can now go over and remove all the filepaths @@ -848,7 +848,7 @@ def convert_to_id(value, table, conn_handler=None): IncompetentQiitaDeveloperError The passed string has no associated id """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() sql = "SELECT {0}_id FROM qiita.{0} WHERE {0} = %s".format(table) _id = conn_handler.execute_fetchone(sql, (value, )) if _id is None: @@ -879,7 +879,7 @@ def convert_from_id(value, table, conn_handler=None): ValueError The passed id has no associated string """ - conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + conn_handler = SQLConnectionHandler() string = conn_handler.execute_fetchone( "SELECT {0} FROM qiita.{0} WHERE {0}_id = %s".format(table), (value, )) @@ -989,8 +989,9 @@ def get_environmental_packages(conn_handler=None): environmental package name and the second string is the table where the metadata for the environmental package is stored """ - conn = conn_handler if conn_handler else SQLConnectionHandler() - return conn.execute_fetchall("SELECT * FROM qiita.environmental_package") + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchall( + "SELECT * FROM qiita.environmental_package") def get_timeseries_types(conn_handler=None): @@ -1007,8 +1008,8 @@ def get_timeseries_types(conn_handler=None): The available timeseries types. 
Each timeseries type is defined by the tuple (timeseries_id, timeseries_type, intervention_type) """ - conn = conn_handler if conn_handler else SQLConnectionHandler() - return conn.execute_fetchall( + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchall( "SELECT * FROM qiita.timeseries_type ORDER BY timeseries_type_id") From 520bcfa3a240977d6d392249470dc6461af2983f Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Fri, 1 May 2015 05:18:12 -0600 Subject: [PATCH 2/4] removing conn_handler from def and calls - 1 --- qiita_db/analysis.py | 15 +++---- qiita_db/base.py | 4 +- qiita_db/data.py | 18 ++++---- qiita_db/job.py | 10 ++--- .../base_metadata_template.py | 8 ++-- qiita_db/metadata_template/prep_template.py | 5 +-- qiita_db/parameters.py | 4 +- qiita_db/reference.py | 19 ++++---- qiita_db/study.py | 3 +- .../patches/python_patches/15.py | 2 +- qiita_db/test/test_analysis.py | 6 +-- qiita_db/test/test_util.py | 9 ++-- qiita_db/user.py | 2 +- qiita_db/util.py | 45 +++++++------------ 14 files changed, 60 insertions(+), 90 deletions(-) diff --git a/qiita_db/analysis.py b/qiita_db/analysis.py index 15360485c..de415eaba 100644 --- a/qiita_db/analysis.py +++ b/qiita_db/analysis.py @@ -656,12 +656,12 @@ def build_files(self, rarefaction_depth=None): raise ValueError("rarefaction_depth must be greater than 0") conn_handler = SQLConnectionHandler() - samples = self._get_samples(conn_handler=conn_handler) - self._build_mapping_file(samples, conn_handler=conn_handler) + samples = self._get_samples() + self._build_mapping_file(samples) self._build_biom_tables(samples, rarefaction_depth, conn_handler=conn_handler) - def _get_samples(self, conn_handler=None): + def _get_samples(self): """Retrieves dict of samples to proc_data_id for the analysis""" conn_handler = SQLConnectionHandler() sql = ("SELECT processed_data_id, array_agg(sample_id ORDER BY " @@ -714,7 +714,7 @@ def _build_biom_tables(self, samples, rarefaction_depth, self._add_file("%d_analysis_%s.biom" % (self._id, dt), "biom", data_type=dt, conn_handler=conn_handler) - def _build_mapping_file(self, samples, conn_handler=None): + def _build_mapping_file(self, samples): """Builds the combined mapping file for all samples Code modified slightly from qiime.util.MetadataMap.__add__""" conn_handler = SQLConnectionHandler() @@ -787,11 +787,10 @@ def _add_file(self, filename, filetype, data_type=None, conn_handler=None): data_type : str, optional conn_handler : SQLConnectionHandler object, optional """ - conn_handler = SQLConnectionHandler() \ - + conn_handler = SQLConnectionHandler() - filetype_id = convert_to_id(filetype, 'filepath_type', conn_handler) - _, mp = get_mountpoint('analysis', conn_handler)[0] + filetype_id = convert_to_id(filetype, 'filepath_type') + _, mp = get_mountpoint('analysis')[0] fpid = insert_filepaths([ (join(mp, filename), filetype_id)], -1, 'analysis', 'filepath', conn_handler, move_files=False)[0] diff --git a/qiita_db/base.py b/qiita_db/base.py index 3f42e6d11..9773fb078 100644 --- a/qiita_db/base.py +++ b/qiita_db/base.py @@ -113,15 +113,13 @@ def _check_subclass(cls): raise IncompetentQiitaDeveloperError( "Could not instantiate an object of the base class") - def _check_id(self, id_, conn_handler=None): + def _check_id(self, id_): r"""Check that the provided ID actually exists on the database Parameters ---------- id_ : object The ID to test - conn_handler : SQLConnectionHandler - The connection handler object connected to the DB Notes ----- diff --git a/qiita_db/data.py b/qiita_db/data.py 
index 007733e41..469e905d4 100644 --- a/qiita_db/data.py +++ b/qiita_db/data.py @@ -193,11 +193,11 @@ def get_filepaths(self): self._data_filepath_table, self._data_filepath_column), {'id': self.id}) - _, fb = get_mountpoint(self._table, conn_handler)[0] + _, fb = get_mountpoint(self._table)[0] base_fp = partial(join, fb) - return [(fpid, base_fp(fp), convert_from_id(fid, "filepath_type", - conn_handler)) for fpid, fp, fid in db_paths] + return [(fpid, base_fp(fp), convert_from_id(fid, "filepath_type")) + for fpid, fp, fid in db_paths] def get_filepath_ids(self): self._check_subclass() @@ -579,8 +579,7 @@ def clear_filepaths(self): # Move the files, if they are not used, if you get to this point # self.studies should only have one element, thus self.studies[0] - move_filepaths_to_upload_folder(self.studies[0], filepaths, - conn_handler=conn_handler) + move_filepaths_to_upload_folder(self.studies[0], filepaths) def remove_filepath(self, fp): """Removes the filepath from the RawData @@ -614,7 +613,7 @@ def remove_filepath(self, fp): self._set_link_filepaths_status("idle") # Delete the files, if they are not used anywhere - purge_filepaths(conn_handler) + purge_filepaths() def status(self, study): """The status of the raw data within the given study @@ -753,7 +752,7 @@ def create(cls, study, preprocessed_params_table, preprocessed_params_id, data_type = prep_template.data_type(ret_id=True) else: # only data_type, so need id from the text - data_type = convert_to_id(data_type, "data_type", conn_handler) + data_type = convert_to_id(data_type, "data_type") # Check that the preprocessed_params_table exists if not exists_dynamic_table(preprocessed_params_table, "preprocessed_", @@ -1278,7 +1277,7 @@ def create(cls, processed_params_table, processed_params_id, filepaths, "You must provide either a preprocessed_data, a " "data_type, or both") else: - data_type = convert_to_id(data_type, "data_type", conn_handler) + data_type = convert_to_id(data_type, "data_type") # We first check that the processed_params_table exists if not exists_dynamic_table(processed_params_table, @@ -1524,8 +1523,7 @@ def status(self, status): conn_handler = SQLConnectionHandler() - status_id = convert_to_id(status, 'processed_data_status', - conn_handler=conn_handler) + status_id = convert_to_id(status, 'processed_data_status') sql = """UPDATE qiita.{0} SET processed_data_status_id = %s WHERE processed_data_id=%s""".format(self._table) diff --git a/qiita_db/job.py b/qiita_db/job.py index 79ee640f0..ec040579a 100644 --- a/qiita_db/job.py +++ b/qiita_db/job.py @@ -110,7 +110,7 @@ def exists(cls, datatype, command, options, analysis, """ conn_handler = SQLConnectionHandler() # check passed arguments and grab analyses for matching jobs - datatype_id = convert_to_id(datatype, "data_type", conn_handler) + datatype_id = convert_to_id(datatype, "data_type") sql = "SELECT command_id FROM qiita.command WHERE name = %s" command_id = conn_handler.execute_fetchone(sql, (command, ))[0] opts_json = params_dict_to_json(options) @@ -238,7 +238,7 @@ def create(cls, datatype, command, options, analysis, "analysis: %s" % (datatype, command, options, analysis.id)) # Get the datatype and command ids from the strings - datatype_id = convert_to_id(datatype, "data_type", conn_handler) + datatype_id = convert_to_id(datatype, "data_type") sql = "SELECT command_id FROM qiita.command WHERE name = %s" command_id = conn_handler.execute_fetchone(sql, (command, ))[0] opts_json = params_dict_to_json(options) @@ -297,7 +297,7 @@ def options(self): "job_id = 
%s)".format(self._table)) db_comm = conn_handler.execute_fetchone(sql, (self._id, )) out_opt = loads(db_comm[1]) - basedir = get_db_files_base_dir(conn_handler) + basedir = get_db_files_base_dir() join_f = partial(join, join(basedir, "job")) for k in out_opt: opts[k] = join_f("%s_%s_%s" % (self._id, db_comm[0], k.strip("-"))) @@ -422,7 +422,7 @@ def add_results(self, results): conn_handler = SQLConnectionHandler() self._lock_job(conn_handler) # convert all file type text to file type ids - res_ids = [(fp, convert_to_id(fptype, "filepath_type", conn_handler)) + res_ids = [(fp, convert_to_id(fptype, "filepath_type")) for fp, fptype in results] file_ids = insert_filepaths(res_ids, self._id, self._table, "filepath", conn_handler, move_files=False) @@ -485,7 +485,7 @@ def get_commands_by_datatype(cls, datatypes=None): conn_handler = SQLConnectionHandler() # get the ids of the datatypes to get commands for if datatypes is not None: - datatype_info = [(convert_to_id(dt, "data_type", conn_handler), dt) + datatype_info = [(convert_to_id(dt, "data_type"), dt) for dt in datatypes] else: datatype_info = conn_handler.execute_fetchall( diff --git a/qiita_db/metadata_template/base_metadata_template.py b/qiita_db/metadata_template/base_metadata_template.py index 1b0d3d6e3..9f17d0bc7 100644 --- a/qiita_db/metadata_template/base_metadata_template.py +++ b/qiita_db/metadata_template/base_metadata_template.py @@ -195,7 +195,7 @@ def _get_categories(self, conn_handler): The set of all available metadata categories """ # Get all the columns - cols = get_table_cols(self._dynamic_table, conn_handler) + cols = get_table_cols(self._dynamic_table) # Remove the sample_id column as this column is used internally for # data storage and it doesn't actually belong to the metadata cols.remove('sample_id') @@ -489,7 +489,7 @@ class MetadataTemplate(QiitaObject): _id_column = None _sample_cls = None - def _check_id(self, id_, conn_handler=None): + def _check_id(self, id_): r"""Checks that the MetadataTemplate id_ exists on the database""" self._check_subclass() @@ -1013,7 +1013,7 @@ def to_dataframe(self): The metadata in the template,indexed on sample id """ conn_handler = SQLConnectionHandler() - cols = sorted(get_table_cols(self._table_name(self._id), conn_handler)) + cols = sorted(get_table_cols(self._table_name(self._id))) # Get all metadata for the template sql = "SELECT {0} FROM qiita.{1}".format(", ".join(cols), self._table_name(self.id)) @@ -1071,7 +1071,7 @@ def get_filepaths(self, conn_handler=None): info={self.__class__.__name__: self.id}) raise e - _, fb = get_mountpoint('templates', conn_handler)[0] + _, fb = get_mountpoint('templates')[0] base_fp = partial(join, fb) return [(fpid, base_fp(fp)) for fpid, fp in filepath_ids] diff --git a/qiita_db/metadata_template/prep_template.py b/qiita_db/metadata_template/prep_template.py index 2b3446bb4..d68ebc3d3 100644 --- a/qiita_db/metadata_template/prep_template.py +++ b/qiita_db/metadata_template/prep_template.py @@ -117,10 +117,9 @@ def create(cls, md_template, raw_data, study, data_type, # Check if the data_type is the id or the string if isinstance(data_type, (int, long)): data_type_id = data_type - data_type_str = convert_from_id(data_type, "data_type", - conn_handler) + data_type_str = convert_from_id(data_type, "data_type") else: - data_type_id = convert_to_id(data_type, "data_type", conn_handler) + data_type_id = convert_to_id(data_type, "data_type") data_type_str = data_type pt_cols = PREP_TEMPLATE_COLUMNS diff --git a/qiita_db/parameters.py 
b/qiita_db/parameters.py index 43e72eb1c..d21b5f887 100644 --- a/qiita_db/parameters.py +++ b/qiita_db/parameters.py @@ -116,15 +116,13 @@ def values(self): del result['param_set_name'] return result - def _check_id(self, id_, conn_handler=None): + def _check_id(self, id_): r"""Check that the provided ID actually exists in the database Parameters ---------- id_ : object The ID to test - conn_handler : SQLConnectionHandler - The connection handler object connected to the DB Notes ----- diff --git a/qiita_db/reference.py b/qiita_db/reference.py index 7bad25696..5e96bcda0 100644 --- a/qiita_db/reference.py +++ b/qiita_db/reference.py @@ -70,24 +70,21 @@ def create(cls, name, version, seqs_fp, tax_fp=None, tree_fp=None): conn_handler = SQLConnectionHandler() seq_id = insert_filepaths([(seqs_fp, convert_to_id("reference_seqs", - "filepath_type", - conn_handler))], + "filepath_type"))], "%s_%s" % (name, version), "reference", "filepath", conn_handler)[0] # Check if the database has taxonomy file tax_id = None if tax_fp: - fps = [(tax_fp, convert_to_id("reference_tax", "filepath_type", - conn_handler))] + fps = [(tax_fp, convert_to_id("reference_tax", "filepath_type"))] tax_id = insert_filepaths(fps, "%s_%s" % (name, version), "reference", "filepath", conn_handler)[0] # Check if the database has tree file tree_id = None if tree_fp: - fps = [(tree_fp, convert_to_id("reference_tree", "filepath_type", - conn_handler))] + fps = [(tree_fp, convert_to_id("reference_tree", "filepath_type"))] tree_id = insert_filepaths(fps, "%s_%s" % (name, version), "reference", "filepath", conn_handler)[0] @@ -129,7 +126,7 @@ def name(self): return conn_handler.execute_fetchone( "SELECT reference_name FROM qiita.{0} WHERE " "reference_id = %s".format(self._table), (self._id,))[0] - _, basefp = get_mountpoint('reference', conn_handler=conn_handler)[0] + _, basefp = get_mountpoint('reference')[0] @property def version(self): @@ -137,7 +134,7 @@ def version(self): return conn_handler.execute_fetchone( "SELECT reference_version FROM qiita.{0} WHERE " "reference_id = %s".format(self._table), (self._id,))[0] - _, basefp = get_mountpoint('reference', conn_handler=conn_handler)[0] + _, basefp = get_mountpoint('reference')[0] @property def sequence_fp(self): @@ -146,7 +143,7 @@ def sequence_fp(self): "SELECT f.filepath FROM qiita.filepath f JOIN qiita.{0} r ON " "r.sequence_filepath=f.filepath_id WHERE " "r.reference_id=%s".format(self._table), (self._id,))[0] - _, basefp = get_mountpoint('reference', conn_handler=conn_handler)[0] + _, basefp = get_mountpoint('reference')[0] return join(basefp, rel_path) @property @@ -156,7 +153,7 @@ def taxonomy_fp(self): "SELECT f.filepath FROM qiita.filepath f JOIN qiita.{0} r ON " "r.taxonomy_filepath=f.filepath_id WHERE " "r.reference_id=%s".format(self._table), (self._id,))[0] - _, basefp = get_mountpoint('reference', conn_handler=conn_handler)[0] + _, basefp = get_mountpoint('reference')[0] return join(basefp, rel_path) @property @@ -166,5 +163,5 @@ def tree_fp(self): "SELECT f.filepath FROM qiita.filepath f JOIN qiita.{0} r ON " "r.tree_filepath=f.filepath_id WHERE " "r.reference_id=%s".format(self._table), (self._id,))[0] - _, basefp = get_mountpoint('reference', conn_handler=conn_handler)[0] + _, basefp = get_mountpoint('reference')[0] return join(basefp, rel_path) diff --git a/qiita_db/study.py b/qiita_db/study.py index 4f0ff3e5b..b0e086b8b 100644 --- a/qiita_db/study.py +++ b/qiita_db/study.py @@ -707,8 +707,7 @@ def environmental_packages(self, values): raise 
TypeError('Environmental packages should be a list') # Get all the environmental packages - env_pkgs = [pkg[0] for pkg in get_environmental_packages( - conn_handler=conn_handler)] + env_pkgs = [pkg[0] for pkg in get_environmental_packages()] # Check that all the passed values are valid environmental packages missing = set(values).difference(env_pkgs) diff --git a/qiita_db/support_files/patches/python_patches/15.py b/qiita_db/support_files/patches/python_patches/15.py index ded9351a7..25a620948 100644 --- a/qiita_db/support_files/patches/python_patches/15.py +++ b/qiita_db/support_files/patches/python_patches/15.py @@ -14,7 +14,7 @@ # retrieve relative filepaths as dictionary for matching mountpoints = {m[1].rstrip('/\\'): m[0] for m in get_mountpoint( - 'analysis', conn_handler=conn_handler, retrieve_all=True)} + 'analysis', retrieve_all=True)} for filepath in filepaths: filename = basename(filepath['filepath']) diff --git a/qiita_db/test/test_analysis.py b/qiita_db/test/test_analysis.py index f923c98c6..911c4b6af 100644 --- a/qiita_db/test/test_analysis.py +++ b/qiita_db/test/test_analysis.py @@ -388,8 +388,7 @@ def test_get_samples(self): def test_build_mapping_file(self): new_id = get_count('qiita.filepath') + 1 samples = {1: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']} - self.analysis._build_mapping_file(samples, - conn_handler=self.conn_handler) + self.analysis._build_mapping_file(samples) obs = self.analysis.mapping_file self.assertEqual(obs, self.map_fp) @@ -419,8 +418,7 @@ def test_build_mapping_file(self): def test_build_mapping_file_duplicate_samples(self): samples = {1: ['1.SKB8.640193', '1.SKB8.640193', '1.SKD8.640184']} with self.assertRaises(QiitaDBError): - self.analysis._build_mapping_file(samples, - conn_handler=self.conn_handler) + self.analysis._build_mapping_file(samples) def test_build_biom_tables(self): new_id = get_count('qiita.filepath') + 1 diff --git a/qiita_db/test/test_util.py b/qiita_db/test/test_util.py index df91799b8..6781465cc 100644 --- a/qiita_db/test/test_util.py +++ b/qiita_db/test/test_util.py @@ -113,15 +113,14 @@ def test_check_table_cols_fail(self): self.table) def test_get_table_cols(self): - obs = get_table_cols("qiita_user", self.conn_handler) + obs = get_table_cols("qiita_user") exp = {"email", "user_level_id", "password", "name", "affiliation", "address", "phone", "user_verify_code", "pass_reset_code", "pass_reset_timestamp"} self.assertEqual(set(obs), exp) def test_get_table_cols_w_type(self): - obs = get_table_cols_w_type("preprocessed_sequence_illumina_params", - self.conn_handler) + obs = get_table_cols_w_type("preprocessed_sequence_illumina_params") exp = [['param_set_name', 'character varying'], ['preprocessed_params_id', 'bigint'], ['max_bad_run_length', 'integer'], @@ -378,7 +377,7 @@ def _common_purge_filpeaths_test(self): exp_count = get_count("qiita.filepath") - 2 - purge_filepaths(self.conn_handler) + purge_filepaths() obs_count = get_count("qiita.filepath") @@ -433,7 +432,7 @@ def test_move_filepaths_to_upload_folder(self): "DELETE FROM qiita.raw_filepath WHERE filepath_id=%s", (fid,)) # moving filepaths - move_filepaths_to_upload_folder(study_id, filepaths, self.conn_handler) + move_filepaths_to_upload_folder(study_id, filepaths) # check that they do not exist in the old path but do in the new one path_for_removal = join(get_mountpoint("uploads")[0][1], str(study_id)) diff --git a/qiita_db/user.py b/qiita_db/user.py index ab1a3d05c..4cae5f5e1 100644 --- a/qiita_db/user.py +++ b/qiita_db/user.py @@ -65,7 +65,7 @@ class 
User(QiitaObject): # The following columns are considered not part of the user info _non_info = {"email", "user_level_id", "password"} - def _check_id(self, id_, conn_handler=None): + def _check_id(self, id_): r"""Check that the provided ID actually exists in the database Parameters diff --git a/qiita_db/util.py b/qiita_db/util.py index 53b6a25ee..3a990f940 100644 --- a/qiita_db/util.py +++ b/qiita_db/util.py @@ -326,15 +326,13 @@ def check_table_cols(conn_handler, keys, table): set(keys).difference(cols)) -def get_table_cols(table, conn_handler=None): +def get_table_cols(table): """Returns the column headers of table Parameters ---------- table : str The table name - conn_handler : SQLConnectionHandler, optional - The connection handler object connected to the DB Returns ------- @@ -348,7 +346,7 @@ def get_table_cols(table, conn_handler=None): return [h[0] for h in headers] -def get_table_cols_w_type(table, conn_handler=None): +def get_table_cols_w_type(table): """Returns the column headers and its type Parameters @@ -404,7 +402,7 @@ def exists_dynamic_table(table, prefix, suffix, conn_handler): exists_table(table, conn_handler)) -def get_db_files_base_dir(conn_handler=None): +def get_db_files_base_dir(): r"""Returns the path to the base directory of all db files Returns @@ -418,7 +416,7 @@ def get_db_files_base_dir(conn_handler=None): "SELECT base_data_dir FROM settings")[0] -def get_work_base_dir(conn_handler=None): +def get_work_base_dir(): r"""Returns the path to the base directory of all db files Returns @@ -538,15 +536,13 @@ def move_upload_files_to_trash(study_id, files_to_move): rename(fullpath, new_fullpath) -def get_mountpoint(mount_type, conn_handler=None, retrieve_all=False): +def get_mountpoint(mount_type, retrieve_all=False): r""" Returns the most recent values from data directory for the given type Parameters ---------- mount_type : str The data mount type - conn_handler : SQLConnectionHandler - The connection handler object connected to the DB retrieve_all : bool Retrieve all the available mount points or just the active one @@ -571,15 +567,13 @@ def get_mountpoint(mount_type, conn_handler=None, retrieve_all=False): return [(d, join(basedir, m, s)) for d, m, s in result] -def get_mountpoint_path_by_id(mount_id, conn_handler=None): +def get_mountpoint_path_by_id(mount_id): r""" Returns the mountpoint path for the mountpoint with id = mount_id Parameters ---------- mount_id : int The mountpoint id - conn_handler : SQLConnectionHandler - The connection handler object connected to the DB Returns ------- @@ -627,7 +621,7 @@ def insert_filepaths(filepaths, obj_id, table, filepath_table, conn_handler, """ new_filepaths = filepaths - dd_id, mp = get_mountpoint(table, conn_handler)[0] + dd_id, mp = get_mountpoint(table)[0] base_fp = join(get_db_files_base_dir(), mp) if move_files: @@ -644,7 +638,7 @@ def insert_filepaths(filepaths, obj_id, table, filepath_table, conn_handler, def str_to_id(x): return (x if isinstance(x, (int, long)) - else convert_to_id(x, "filepath_type", conn_handler)) + else convert_to_id(x, "filepath_type")) paths_w_checksum = [(relpath(path, base_fp), str_to_id(id), compute_checksum(path)) for path, id in new_filepaths] @@ -667,7 +661,7 @@ def str_to_id(x): return [id[0] for id in ids] -def purge_filepaths(conn_handler=None): +def purge_filepaths(): r"""Goes over the filepath table and remove all the filepaths that are not used in any place @@ -720,7 +714,7 @@ def purge_filepaths(conn_handler=None): remove(fp) -def move_filepaths_to_upload_folder(study_id, 
filepaths, conn_handler=None): +def move_filepaths_to_upload_folder(study_id, filepaths): r"""Goes over the filepaths list and moves all the filepaths that are not used in any place to the upload folder of the study @@ -730,8 +724,6 @@ def move_filepaths_to_upload_folder(study_id, filepaths, conn_handler=None): The study id to where the files should be returned to filepaths : list List of filepaths to move to the upload folder - conn_handler : SQLConnectionHandler, optional - The connection handler object connected to the DB """ conn_handler = SQLConnectionHandler() uploads_fp = join(get_mountpoint("uploads")[0][1], str(study_id)) @@ -765,7 +757,7 @@ def get_filepath_id(table, fp, conn_handler): QiitaDBError If fp is not stored in the DB. """ - _, mp = get_mountpoint(table, conn_handler)[0] + _, mp = get_mountpoint(table)[0] base_fp = join(get_db_files_base_dir(), mp) fp_id = conn_handler.execute_fetchone( @@ -826,7 +818,7 @@ def filepath_ids_to_rel_paths(filepath_ids): return {} -def convert_to_id(value, table, conn_handler=None): +def convert_to_id(value, table): """Converts a string value to its corresponding table identifier Parameters @@ -835,8 +827,6 @@ def convert_to_id(value, table, conn_handler=None): The string value to convert table : str The table that has the conversion - conn_handler : SQLConnectionHandler, optional - The sql connection object Returns ------- @@ -857,7 +847,7 @@ def convert_to_id(value, table, conn_handler=None): return _id[0] -def convert_from_id(value, table, conn_handler=None): +def convert_from_id(value, table): """Converts an id value to its corresponding string value Parameters @@ -974,14 +964,9 @@ def get_lat_longs(): return result -def get_environmental_packages(conn_handler=None): +def get_environmental_packages(): """Get the list of available environmental packages - Parameters - ---------- - conn_handler : SQLConnectionHandler, optional - The handler connected to the database - Returns ------- list of (str, str) @@ -994,7 +979,7 @@ def get_environmental_packages(conn_handler=None): "SELECT * FROM qiita.environmental_package") -def get_timeseries_types(conn_handler=None): +def get_timeseries_types(): """Get the list of available timeseries types Parameters From fcab3418cfe60283773cc9431e2ef3cc695f5100 Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Fri, 1 May 2015 06:14:53 -0600 Subject: [PATCH 3/4] removing conn_handler from def and calls - 2 --- qiita_db/analysis.py | 16 +++------ qiita_db/base.py | 4 +-- qiita_db/data.py | 19 ++++------- .../base_metadata_template.py | 4 +-- qiita_db/metadata_template/prep_template.py | 2 +- qiita_db/parameters.py | 9 +++-- qiita_db/test/test_analysis.py | 6 ++-- qiita_db/test/test_base.py | 34 ------------------- qiita_db/test/test_study.py | 3 +- qiita_db/test/test_util.py | 4 +-- qiita_db/user.py | 4 +-- qiita_db/util.py | 5 ++- 12 files changed, 29 insertions(+), 81 deletions(-) diff --git a/qiita_db/analysis.py b/qiita_db/analysis.py index de415eaba..2c9cecf1a 100644 --- a/qiita_db/analysis.py +++ b/qiita_db/analysis.py @@ -655,11 +655,9 @@ def build_files(self, rarefaction_depth=None): if rarefaction_depth <= 0: raise ValueError("rarefaction_depth must be greater than 0") - conn_handler = SQLConnectionHandler() samples = self._get_samples() self._build_mapping_file(samples) - self._build_biom_tables(samples, rarefaction_depth, - conn_handler=conn_handler) + self._build_biom_tables(samples, rarefaction_depth) def _get_samples(self): """Retrieves dict of samples to proc_data_id for the analysis""" @@ 
-669,8 +667,7 @@ def _get_samples(self): "GROUP BY processed_data_id") return dict(conn_handler.execute_fetchall(sql, [self._id])) - def _build_biom_tables(self, samples, rarefaction_depth, - conn_handler=None): + def _build_biom_tables(self, samples, rarefaction_depth): """Build tables and add them to the analysis""" # filter and combine all study BIOM tables needed for each data type new_tables = {dt: None for dt in self.data_types} @@ -700,7 +697,6 @@ def _build_biom_tables(self, samples, rarefaction_depth, new_tables[data_type] = new_tables[data_type].merge(table) # add the new tables to the analysis - conn_handler = SQLConnectionHandler() _, base_fp = get_mountpoint(self._table)[0] for dt, biom_table in viewitems(new_tables): # rarefy, if specified @@ -712,7 +708,7 @@ def _build_biom_tables(self, samples, rarefaction_depth, biom_table.to_hdf5(f, "Analysis %s Datatype %s" % (self._id, dt)) self._add_file("%d_analysis_%s.biom" % (self._id, dt), - "biom", data_type=dt, conn_handler=conn_handler) + "biom", data_type=dt) def _build_mapping_file(self, samples): """Builds the combined mapping file for all samples @@ -773,10 +769,9 @@ def _build_mapping_file(self, samples): merged_map.to_csv(mapping_fp, index_label='#SampleID', na_rep='unknown', sep='\t') - self._add_file("%d_analysis_mapping.txt" % self._id, - "plain_text", conn_handler=conn_handler) + self._add_file("%d_analysis_mapping.txt" % self._id, "plain_text") - def _add_file(self, filename, filetype, data_type=None, conn_handler=None): + def _add_file(self, filename, filetype, data_type=None): """adds analysis item to database Parameters @@ -785,7 +780,6 @@ def _add_file(self, filename, filetype, data_type=None, conn_handler=None): filename to add to analysis filetype : {plain_text, biom} data_type : str, optional - conn_handler : SQLConnectionHandler object, optional """ conn_handler = SQLConnectionHandler() diff --git a/qiita_db/base.py b/qiita_db/base.py index 9773fb078..35cd3a2fc 100644 --- a/qiita_db/base.py +++ b/qiita_db/base.py @@ -226,7 +226,7 @@ def status(self, status): "(SELECT {0}_status_id FROM qiita.{0}_status WHERE status = %s) " "WHERE {0}_id = %s".format(self._table), (status, self._id)) - def check_status(self, status, exclude=False, conn_handler=None): + def check_status(self, status, exclude=False): r"""Checks status of object. Parameters @@ -236,8 +236,6 @@ def check_status(self, status, exclude=False, conn_handler=None): exclude: bool, optional If True, will check that database status is NOT one of the statuses passed. Default False. 
- conn_handler: SQLConnectionHandler, optional - The connection handler object connected to the DB Returns ------- diff --git a/qiita_db/data.py b/qiita_db/data.py index 469e905d4..85f7114d4 100644 --- a/qiita_db/data.py +++ b/qiita_db/data.py @@ -139,7 +139,7 @@ def _link_data_filepaths(self, fp_ids, conn_handler): "VALUES (%s, %s)".format(self._data_filepath_table, self._data_filepath_column), values) - def add_filepaths(self, filepaths, conn_handler=None): + def add_filepaths(self, filepaths): r"""Populates the DB tables for storing the filepaths and connects the `self` objects with these filepaths""" # Check that this function has been called from a subclass @@ -326,7 +326,7 @@ def create(cls, filetype, studies, filepaths=None): # If file paths have been provided, add them to the raw data object if filepaths: - rd.add_filepaths(filepaths, conn_handler) + rd.add_filepaths(filepaths) return rd @@ -465,14 +465,9 @@ def prep_templates(self): "WHERE raw_data_id = %s ORDER BY prep_template_id") return [x[0] for x in conn_handler.execute_fetchall(sql, (self._id,))] - def _is_preprocessed(self, conn_handler=None): + def _is_preprocessed(self): """Returns whether the RawData has been preprocessed or not - Parameters - ---------- - conn_handler : SQLConnectionHandler - The connection handler object connected to the DB - Returns ------- bool @@ -507,7 +502,7 @@ def _remove_filepath(self, fp, conn_handler, queue): """ # If the RawData has been already preprocessed, we cannot remove any # file - raise an error - if self._is_preprocessed(conn_handler): + if self._is_preprocessed(): msg = ("Cannot clear all the filepaths from raw data %s, it has " "been already preprocessed" % self._id) self._set_link_filepaths_status("failed: %s" % msg) @@ -525,7 +520,7 @@ def _remove_filepath(self, fp, conn_handler, queue): raise QiitaDBError(msg) # Get the filpeath id - fp_id = get_filepath_id(self._table, fp, conn_handler) + fp_id = get_filepath_id(self._table, fp) fp_is_mine = conn_handler.execute_fetchone( "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE filepath_id=%s AND " "{1}=%s)".format(self._data_filepath_table, @@ -803,7 +798,7 @@ def create(cls, study, preprocessed_params_table, preprocessed_params_id, conn_handler.execute_queue(q) # Add the filepaths to the database and connect them - ppd.add_filepaths(filepaths, conn_handler) + ppd.add_filepaths(filepaths) return ppd @classmethod @@ -1320,7 +1315,7 @@ def create(cls, processed_params_table, processed_params_id, filepaths, "(%s, %s)".format(cls._study_processed_table), (study_id, pd_id)) - pd.add_filepaths(filepaths, conn_handler) + pd.add_filepaths(filepaths) return cls(pd_id) @classmethod diff --git a/qiita_db/metadata_template/base_metadata_template.py b/qiita_db/metadata_template/base_metadata_template.py index 9f17d0bc7..61beccd7f 100644 --- a/qiita_db/metadata_template/base_metadata_template.py +++ b/qiita_db/metadata_template/base_metadata_template.py @@ -1025,7 +1025,7 @@ def to_dataframe(self): return df - def add_filepath(self, filepath, conn_handler=None, fp_id=None): + def add_filepath(self, filepath, fp_id=None): r"""Populates the DB tables for storing the filepath and connects the `self` objects with this filepath""" # Check that this function has been called from a subclass @@ -1050,7 +1050,7 @@ def add_filepath(self, filepath, conn_handler=None, fp_id=None): info={self.__class__.__name__: self.id}) raise e - def get_filepaths(self, conn_handler=None): + def get_filepaths(self): r"""Retrieves the list of (filepath_id, filepath)""" # Check 
that this function has been called from a subclass self._check_subclass() diff --git a/qiita_db/metadata_template/prep_template.py b/qiita_db/metadata_template/prep_template.py index d68ebc3d3..2d6bdceb6 100644 --- a/qiita_db/metadata_template/prep_template.py +++ b/qiita_db/metadata_template/prep_template.py @@ -495,7 +495,7 @@ def create_qiime_mapping_file(self): # adding the fp to the object self.add_filepath( - filepath, conn_handler=conn_handler, + filepath, fp_id=convert_to_id("qiime_map", "filepath_type")) return filepath diff --git a/qiita_db/parameters.py b/qiita_db/parameters.py index d21b5f887..eba0b8341 100644 --- a/qiita_db/parameters.py +++ b/qiita_db/parameters.py @@ -138,8 +138,9 @@ def _check_id(self, id_): self._table, self._column_id), (id_, ))[0] - def _get_values_as_dict(self, conn_handler): + def _get_values_as_dict(self): r"""""" + conn_handler = SQLConnectionHandler() return dict(conn_handler.execute_fetchone( "SELECT * FROM qiita.{0} WHERE {1}=%s".format( self._table, self._column_id), (self.id,))) @@ -152,11 +153,10 @@ def to_str(self): str The string with all the parameters """ - conn_handler = SQLConnectionHandler() table_cols = get_table_cols_w_type(self._table) table_cols.remove([self._column_id, 'bigint']) - values = self._get_values_as_dict(conn_handler=conn_handler) + values = self._get_values_as_dict() result = [] for p_name, p_type in sorted(table_cols): @@ -212,8 +212,7 @@ def to_file(self, f): File-like object to write the parameters. Should support the write operation """ - conn_handler = SQLConnectionHandler() - values = self._get_values_as_dict(conn_handler) + values = self._get_values_as_dict() # Remove the id column del values[self._column_id] diff --git a/qiita_db/test/test_analysis.py b/qiita_db/test/test_analysis.py index 911c4b6af..bdccbb24e 100644 --- a/qiita_db/test/test_analysis.py +++ b/qiita_db/test/test_analysis.py @@ -237,8 +237,7 @@ def test_retrieve_dropped_samples(self): samples = {1: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196'], 2: ['2.SKB8.640193', '2.SKD8.640184']} - self.analysis._build_biom_tables(samples, 10000, - conn_handler=self.conn_handler) + self.analysis._build_biom_tables(samples, 10000) exp = {1: {'1.SKM4.640180', '1.SKM9.640192'}, 2: {'2.SKB7.640196'}} self.assertEqual(self.analysis.dropped_samples, exp) @@ -423,8 +422,7 @@ def test_build_mapping_file_duplicate_samples(self): def test_build_biom_tables(self): new_id = get_count('qiita.filepath') + 1 samples = {1: ['1.SKB8.640193', '1.SKD8.640184', '1.SKB7.640196']} - self.analysis._build_biom_tables(samples, 100, - conn_handler=self.conn_handler) + self.analysis._build_biom_tables(samples, 100) obs = self.analysis.biom_tables self.assertEqual(obs, {'18S': self.biom_fp}) diff --git a/qiita_db/test/test_base.py b/qiita_db/test/test_base.py index 26ef52931..1f439d6c1 100644 --- a/qiita_db/test/test_base.py +++ b/qiita_db/test/test_base.py @@ -87,46 +87,27 @@ def test_status(self): def test_check_status_single(self): """check_status works passing a single status""" self.assertTrue(self.tester.check_status(["in_construction"])) - self.assertTrue(self.tester.check_status(["in_construction"], - conn_handler=self.conn_handler)) self.assertFalse(self.tester.check_status(["queued"])) - self.assertFalse(self.tester.check_status(["queued"], - conn_handler=self.conn_handler)) def test_check_status_exclude_single(self): """check_status works passing a single status and the exclude flag""" self.assertTrue(self.tester.check_status(["public"], exclude=True)) - 
self.assertTrue(self.tester.check_status(["public"], exclude=True, - conn_handler=self.conn_handler)) self.assertFalse(self.tester.check_status(["in_construction"], exclude=True)) - self.assertFalse(self.tester.check_status(["in_construction"], - exclude=True, conn_handler=self.conn_handler)) def test_check_status_list(self): """check_status work passing a list of status""" self.assertTrue(self.tester.check_status( ["in_construction", "queued"])) - self.assertTrue(self.tester.check_status( - ["in_construction", "queued"], - conn_handler=self.conn_handler)) self.assertFalse(self.tester.check_status( ["public", "queued"])) - self.assertFalse(self.tester.check_status( - ["public", "queued"], conn_handler=self.conn_handler)) def test_check_status_exclude_list(self): """check_status work passing a list of status and the exclude flag""" self.assertTrue(self.tester.check_status( ["public", "queued"], exclude=True)) - self.assertTrue(self.tester.check_status( - ["public", "queued"], exclude=True, - conn_handler=self.conn_handler)) self.assertFalse(self.tester.check_status( ["in_construction", "queued"], exclude=True)) - self.assertFalse(self.tester.check_status( - ["in_construction", "queued"], exclude=True, - conn_handler=self.conn_handler)) def test_check_status_unknown_status(self): """check_status raises an error if an invalid status is provided""" @@ -136,13 +117,6 @@ def test_check_status_unknown_status(self): with self.assertRaises(ValueError): self.tester.check_status(["foo"], exclude=True) - with self.assertRaises(ValueError): - self.tester.check_status(["foo"], conn_handler=self.conn_handler) - - with self.assertRaises(ValueError): - self.tester.check_status(["foo"], exclude=True, - conn_handler=self.conn_handler) - def test_check_status_unknown_status_list(self): """check_status raises an error if an invalid status list is provided """ @@ -152,13 +126,5 @@ def test_check_status_unknown_status_list(self): with self.assertRaises(ValueError): self.tester.check_status(["foo", "bar"], exclude=True) - with self.assertRaises(ValueError): - self.tester.check_status(["foo", "bar"], - conn_handler=self.conn_handler) - - with self.assertRaises(ValueError): - self.tester.check_status(["foo", "bar"], exclude=True, - conn_handler=self.conn_handler) - if __name__ == '__main__': main() diff --git a/qiita_db/test/test_study.py b/qiita_db/test/test_study.py index a235c3cce..96f86eb93 100644 --- a/qiita_db/test/test_study.py +++ b/qiita_db/test/test_study.py @@ -175,8 +175,7 @@ def setUp(self): def _change_processed_data_status(self, new_status): # Change the status of the studies by changing the status of their # processed data - id_status = convert_to_id(new_status, 'processed_data_status', - self.conn_handler) + id_status = convert_to_id(new_status, 'processed_data_status') self.conn_handler.execute( "UPDATE qiita.processed_data SET processed_data_status_id = %s", (id_status,)) diff --git a/qiita_db/test/test_util.py b/qiita_db/test/test_util.py index 6781465cc..6861b9f74 100644 --- a/qiita_db/test/test_util.py +++ b/qiita_db/test/test_util.py @@ -446,12 +446,12 @@ def test_move_filepaths_to_upload_folder(self): def test_get_filepath_id(self): _, base = get_mountpoint("raw_data")[0] fp = join(base, '1_s_G1_L001_sequences.fastq.gz') - obs = get_filepath_id("raw_data", fp, self.conn_handler) + obs = get_filepath_id("raw_data", fp) self.assertEqual(obs, 1) def test_get_filepath_id_error(self): with self.assertRaises(QiitaDBError): - get_filepath_id("raw_data", "Not_a_path", self.conn_handler) + 
get_filepath_id("raw_data", "Not_a_path") def test_get_mountpoint(self): exp = [(5, join(get_db_files_base_dir(), 'raw_data', ''))] diff --git a/qiita_db/user.py b/qiita_db/user.py index 4cae5f5e1..31ed37914 100644 --- a/qiita_db/user.py +++ b/qiita_db/user.py @@ -415,7 +415,7 @@ def change_password(self, oldpass, newpass): "SELECT password FROM qiita.{0} WHERE email = %s".format( self._table), (self._id, ))[0] if dbpass == hash_password(oldpass, dbpass): - self._change_pass(newpass, conn_handler=conn_handler) + self._change_pass(newpass) return True return False @@ -448,7 +448,7 @@ def change_forgot_password(self, code, newpass): return True return False - def _change_pass(self, newpass, conn_handler=None): + def _change_pass(self, newpass): if not validate_password(newpass): raise IncorrectPasswordError("Bad password given!") diff --git a/qiita_db/util.py b/qiita_db/util.py index 3a990f940..33b38af6b 100644 --- a/qiita_db/util.py +++ b/qiita_db/util.py @@ -740,7 +740,7 @@ def move_filepaths_to_upload_folder(study_id, filepaths): move(fp, destination) -def get_filepath_id(table, fp, conn_handler): +def get_filepath_id(table, fp): """Return the filepath_id of fp Parameters @@ -749,14 +749,13 @@ def get_filepath_id(table, fp, conn_handler): The table type so we can search on this one fp : str The filepath - conn_handler : SQLConnectionHandler - The sql connection object Raises ------ QiitaDBError If fp is not stored in the DB. """ + conn_handler = SQLConnectionHandler() _, mp = get_mountpoint(table)[0] base_fp = join(get_db_files_base_dir(), mp) From 2de4acd89b7c9d26bc53457aee5cf3fdf7a7e56a Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Fri, 1 May 2015 06:19:54 -0600 Subject: [PATCH 4/4] rm leftover docstrings --- qiita_db/user.py | 2 -- qiita_db/util.py | 17 +---------------- 2 files changed, 1 insertion(+), 18 deletions(-) diff --git a/qiita_db/user.py b/qiita_db/user.py index 31ed37914..58a570469 100644 --- a/qiita_db/user.py +++ b/qiita_db/user.py @@ -72,8 +72,6 @@ def _check_id(self, id_): ---------- id_ : object The ID to test - conn_handler : SQLConnectionHandler - The connection handler object connected to the DB Notes ----- diff --git a/qiita_db/util.py b/qiita_db/util.py index 33b38af6b..7ffb53d9d 100644 --- a/qiita_db/util.py +++ b/qiita_db/util.py @@ -353,8 +353,6 @@ def get_table_cols_w_type(table): ---------- table : str The table name - conn_handler : SQLConnectionHandler, optional - The connection handler object connected to the db Returns ------- @@ -663,13 +661,7 @@ def str_to_id(x): def purge_filepaths(): r"""Goes over the filepath table and remove all the filepaths that are not - used in any place - - Parameters - ---------- - conn_handler : SQLConnectionHandler, optional - The connection handler object connected to the DB - """ + used in any place""" conn_handler = SQLConnectionHandler() # Get all the (table, column) pairs that reference to the filepath table @@ -855,8 +847,6 @@ def convert_from_id(value, table): The id value to convert table : str The table that has the conversion - conn_handler : SQLConnectionHandler, optional - The sql connection object Returns ------- @@ -981,11 +971,6 @@ def get_environmental_packages(): def get_timeseries_types(): """Get the list of available timeseries types - Parameters - ---------- - conn_handler : SQLConnectionHandler, optional - The handler connected to the database - Returns ------- list of (int, str, str)