Skip to content

Commit 01ac199

Browse files
committed
Merge pull request #1140 from antgonza/fix-1136
Fix 1136
2 parents ef7955c + 2de4acd commit 01ac199

File tree

16 files changed

+125
-227
lines changed

16 files changed

+125
-227
lines changed

qiita_db/analysis.py

Lines changed: 14 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -655,23 +655,19 @@ def build_files(self, rarefaction_depth=None):
655655
if rarefaction_depth <= 0:
656656
raise ValueError("rarefaction_depth must be greater than 0")
657657

658-
conn_handler = SQLConnectionHandler()
659-
samples = self._get_samples(conn_handler=conn_handler)
660-
self._build_mapping_file(samples, conn_handler=conn_handler)
661-
self._build_biom_tables(samples, rarefaction_depth,
662-
conn_handler=conn_handler)
658+
samples = self._get_samples()
659+
self._build_mapping_file(samples)
660+
self._build_biom_tables(samples, rarefaction_depth)
663661

664-
def _get_samples(self, conn_handler=None):
662+
def _get_samples(self):
665663
"""Retrieves dict of samples to proc_data_id for the analysis"""
666-
conn_handler = conn_handler if conn_handler is not None \
667-
else SQLConnectionHandler()
664+
conn_handler = SQLConnectionHandler()
668665
sql = ("SELECT processed_data_id, array_agg(sample_id ORDER BY "
669666
"sample_id) FROM qiita.analysis_sample WHERE analysis_id = %s "
670667
"GROUP BY processed_data_id")
671668
return dict(conn_handler.execute_fetchall(sql, [self._id]))
672669

673-
def _build_biom_tables(self, samples, rarefaction_depth,
674-
conn_handler=None):
670+
def _build_biom_tables(self, samples, rarefaction_depth):
675671
"""Build tables and add them to the analysis"""
676672
# filter and combine all study BIOM tables needed for each data type
677673
new_tables = {dt: None for dt in self.data_types}
@@ -701,8 +697,6 @@ def _build_biom_tables(self, samples, rarefaction_depth,
701697
new_tables[data_type] = new_tables[data_type].merge(table)
702698

703699
# add the new tables to the analysis
704-
conn_handler = conn_handler if conn_handler is not None \
705-
else SQLConnectionHandler()
706700
_, base_fp = get_mountpoint(self._table)[0]
707701
for dt, biom_table in viewitems(new_tables):
708702
# rarefy, if specified
@@ -714,14 +708,12 @@ def _build_biom_tables(self, samples, rarefaction_depth,
714708
biom_table.to_hdf5(f, "Analysis %s Datatype %s" %
715709
(self._id, dt))
716710
self._add_file("%d_analysis_%s.biom" % (self._id, dt),
717-
"biom", data_type=dt, conn_handler=conn_handler)
711+
"biom", data_type=dt)
718712

719-
def _build_mapping_file(self, samples, conn_handler=None):
713+
def _build_mapping_file(self, samples):
720714
"""Builds the combined mapping file for all samples
721715
Code modified slightly from qiime.util.MetadataMap.__add__"""
722-
conn_handler = conn_handler if conn_handler is not None \
723-
else SQLConnectionHandler()
724-
716+
conn_handler = SQLConnectionHandler()
725717
all_sample_ids = set()
726718
sql = """SELECT filepath_id, filepath
727719
FROM qiita.filepath
@@ -777,10 +769,9 @@ def _build_mapping_file(self, samples, conn_handler=None):
777769
merged_map.to_csv(mapping_fp, index_label='#SampleID',
778770
na_rep='unknown', sep='\t')
779771

780-
self._add_file("%d_analysis_mapping.txt" % self._id,
781-
"plain_text", conn_handler=conn_handler)
772+
self._add_file("%d_analysis_mapping.txt" % self._id, "plain_text")
782773

783-
def _add_file(self, filename, filetype, data_type=None, conn_handler=None):
774+
def _add_file(self, filename, filetype, data_type=None):
784775
"""adds analysis item to database
785776
786777
Parameters
@@ -789,13 +780,11 @@ def _add_file(self, filename, filetype, data_type=None, conn_handler=None):
789780
filename to add to analysis
790781
filetype : {plain_text, biom}
791782
data_type : str, optional
792-
conn_handler : SQLConnectionHandler object, optional
793783
"""
794-
conn_handler = conn_handler if conn_handler is not None \
795-
else SQLConnectionHandler()
784+
conn_handler = SQLConnectionHandler()
796785

797-
filetype_id = convert_to_id(filetype, 'filepath_type', conn_handler)
798-
_, mp = get_mountpoint('analysis', conn_handler)[0]
786+
filetype_id = convert_to_id(filetype, 'filepath_type')
787+
_, mp = get_mountpoint('analysis')[0]
799788
fpid = insert_filepaths([
800789
(join(mp, filename), filetype_id)], -1, 'analysis', 'filepath',
801790
conn_handler, move_files=False)[0]

qiita_db/base.py

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -113,15 +113,13 @@ def _check_subclass(cls):
113113
raise IncompetentQiitaDeveloperError(
114114
"Could not instantiate an object of the base class")
115115

116-
def _check_id(self, id_, conn_handler=None):
116+
def _check_id(self, id_):
117117
r"""Check that the provided ID actually exists on the database
118118
119119
Parameters
120120
----------
121121
id_ : object
122122
The ID to test
123-
conn_handler : SQLConnectionHandler
124-
The connection handler object connected to the DB
125123
126124
Notes
127125
-----
@@ -132,8 +130,7 @@ def _check_id(self, id_, conn_handler=None):
132130
"""
133131
self._check_subclass()
134132

135-
conn_handler = (conn_handler if conn_handler is not None
136-
else SQLConnectionHandler())
133+
conn_handler = SQLConnectionHandler()
137134

138135
return conn_handler.execute_fetchone(
139136
"SELECT EXISTS(SELECT * FROM qiita.{0} WHERE "
@@ -229,7 +226,7 @@ def status(self, status):
229226
"(SELECT {0}_status_id FROM qiita.{0}_status WHERE status = %s) "
230227
"WHERE {0}_id = %s".format(self._table), (status, self._id))
231228

232-
def check_status(self, status, exclude=False, conn_handler=None):
229+
def check_status(self, status, exclude=False):
233230
r"""Checks status of object.
234231
235232
Parameters
@@ -239,8 +236,6 @@ def check_status(self, status, exclude=False, conn_handler=None):
239236
exclude: bool, optional
240237
If True, will check that database status is NOT one of the statuses
241238
passed. Default False.
242-
conn_handler: SQLConnectionHandler, optional
243-
The connection handler object connected to the DB
244239
245240
Returns
246241
-------
@@ -265,8 +260,8 @@ def check_status(self, status, exclude=False, conn_handler=None):
265260
self._check_subclass()
266261

267262
# Get all available statuses
268-
conn_handler = (conn_handler if conn_handler is not None
269-
else SQLConnectionHandler())
263+
conn_handler = SQLConnectionHandler()
264+
270265
statuses = [x[0] for x in conn_handler.execute_fetchall(
271266
"SELECT DISTINCT status FROM qiita.{0}_status".format(self._table),
272267
(self._id, ))]

qiita_db/data.py

Lines changed: 17 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -139,15 +139,15 @@ def _link_data_filepaths(self, fp_ids, conn_handler):
139139
"VALUES (%s, %s)".format(self._data_filepath_table,
140140
self._data_filepath_column), values)
141141

142-
def add_filepaths(self, filepaths, conn_handler=None):
142+
def add_filepaths(self, filepaths):
143143
r"""Populates the DB tables for storing the filepaths and connects the
144144
`self` objects with these filepaths"""
145145
# Check that this function has been called from a subclass
146146
self._check_subclass()
147147

148148
# Check if the connection handler has been provided. Create a new
149149
# one if not.
150-
conn_handler = conn_handler if conn_handler else SQLConnectionHandler()
150+
conn_handler = SQLConnectionHandler()
151151

152152
# Update the status of the current object
153153
self._set_link_filepaths_status("linking")
@@ -193,11 +193,11 @@ def get_filepaths(self):
193193
self._data_filepath_table,
194194
self._data_filepath_column), {'id': self.id})
195195

196-
_, fb = get_mountpoint(self._table, conn_handler)[0]
196+
_, fb = get_mountpoint(self._table)[0]
197197
base_fp = partial(join, fb)
198198

199-
return [(fpid, base_fp(fp), convert_from_id(fid, "filepath_type",
200-
conn_handler)) for fpid, fp, fid in db_paths]
199+
return [(fpid, base_fp(fp), convert_from_id(fid, "filepath_type"))
200+
for fpid, fp, fid in db_paths]
201201

202202
def get_filepath_ids(self):
203203
self._check_subclass()
@@ -326,7 +326,7 @@ def create(cls, filetype, studies, filepaths=None):
326326

327327
# If file paths have been provided, add them to the raw data object
328328
if filepaths:
329-
rd.add_filepaths(filepaths, conn_handler)
329+
rd.add_filepaths(filepaths)
330330

331331
return rd
332332

@@ -465,20 +465,15 @@ def prep_templates(self):
465465
"WHERE raw_data_id = %s ORDER BY prep_template_id")
466466
return [x[0] for x in conn_handler.execute_fetchall(sql, (self._id,))]
467467

468-
def _is_preprocessed(self, conn_handler=None):
468+
def _is_preprocessed(self):
469469
"""Returns whether the RawData has been preprocessed or not
470470
471-
Parameters
472-
----------
473-
conn_handler : SQLConnectionHandler
474-
The connection handler object connected to the DB
475-
476471
Returns
477472
-------
478473
bool
479474
whether the RawData has been preprocessed or not
480475
"""
481-
conn_handler = conn_handler if conn_handler else SQLConnectionHandler()
476+
conn_handler = SQLConnectionHandler()
482477
return conn_handler.execute_fetchone(
483478
"SELECT EXISTS(SELECT * FROM qiita.prep_template_preprocessed_data"
484479
" PTPD JOIN qiita.prep_template PT ON PT.prep_template_id = "
@@ -507,7 +502,7 @@ def _remove_filepath(self, fp, conn_handler, queue):
507502
"""
508503
# If the RawData has been already preprocessed, we cannot remove any
509504
# file - raise an error
510-
if self._is_preprocessed(conn_handler):
505+
if self._is_preprocessed():
511506
msg = ("Cannot clear all the filepaths from raw data %s, it has "
512507
"been already preprocessed" % self._id)
513508
self._set_link_filepaths_status("failed: %s" % msg)
@@ -525,7 +520,7 @@ def _remove_filepath(self, fp, conn_handler, queue):
525520
raise QiitaDBError(msg)
526521

527522
# Get the filepath id
528-
fp_id = get_filepath_id(self._table, fp, conn_handler)
523+
fp_id = get_filepath_id(self._table, fp)
529524
fp_is_mine = conn_handler.execute_fetchone(
530525
"SELECT EXISTS(SELECT * FROM qiita.{0} WHERE filepath_id=%s AND "
531526
"{1}=%s)".format(self._data_filepath_table,
@@ -579,8 +574,7 @@ def clear_filepaths(self):
579574

580575
# Move the files, if they are not used, if you get to this point
581576
# self.studies should only have one element, thus self.studies[0]
582-
move_filepaths_to_upload_folder(self.studies[0], filepaths,
583-
conn_handler=conn_handler)
577+
move_filepaths_to_upload_folder(self.studies[0], filepaths)
584578

585579
def remove_filepath(self, fp):
586580
"""Removes the filepath from the RawData
@@ -614,7 +608,7 @@ def remove_filepath(self, fp):
614608
self._set_link_filepaths_status("idle")
615609

616610
# Delete the files, if they are not used anywhere
617-
purge_filepaths(conn_handler)
611+
purge_filepaths()
618612

619613
def status(self, study):
620614
"""The status of the raw data within the given study
@@ -753,7 +747,7 @@ def create(cls, study, preprocessed_params_table, preprocessed_params_id,
753747
data_type = prep_template.data_type(ret_id=True)
754748
else:
755749
# only data_type, so need id from the text
756-
data_type = convert_to_id(data_type, "data_type", conn_handler)
750+
data_type = convert_to_id(data_type, "data_type")
757751

758752
# Check that the preprocessed_params_table exists
759753
if not exists_dynamic_table(preprocessed_params_table, "preprocessed_",
@@ -804,7 +798,7 @@ def create(cls, study, preprocessed_params_table, preprocessed_params_id,
804798
conn_handler.execute_queue(q)
805799

806800
# Add the filepaths to the database and connect them
807-
ppd.add_filepaths(filepaths, conn_handler)
801+
ppd.add_filepaths(filepaths)
808802
return ppd
809803

810804
@classmethod
@@ -1278,7 +1272,7 @@ def create(cls, processed_params_table, processed_params_id, filepaths,
12781272
"You must provide either a preprocessed_data, a "
12791273
"data_type, or both")
12801274
else:
1281-
data_type = convert_to_id(data_type, "data_type", conn_handler)
1275+
data_type = convert_to_id(data_type, "data_type")
12821276

12831277
# We first check that the processed_params_table exists
12841278
if not exists_dynamic_table(processed_params_table,
@@ -1321,7 +1315,7 @@ def create(cls, processed_params_table, processed_params_id, filepaths,
13211315
"(%s, %s)".format(cls._study_processed_table),
13221316
(study_id, pd_id))
13231317

1324-
pd.add_filepaths(filepaths, conn_handler)
1318+
pd.add_filepaths(filepaths)
13251319
return cls(pd_id)
13261320

13271321
@classmethod
@@ -1524,8 +1518,7 @@ def status(self, status):
15241518

15251519
conn_handler = SQLConnectionHandler()
15261520

1527-
status_id = convert_to_id(status, 'processed_data_status',
1528-
conn_handler=conn_handler)
1521+
status_id = convert_to_id(status, 'processed_data_status')
15291522

15301523
sql = """UPDATE qiita.{0} SET processed_data_status_id = %s
15311524
WHERE processed_data_id=%s""".format(self._table)

qiita_db/job.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ def exists(cls, datatype, command, options, analysis,
110110
"""
111111
conn_handler = SQLConnectionHandler()
112112
# check passed arguments and grab analyses for matching jobs
113-
datatype_id = convert_to_id(datatype, "data_type", conn_handler)
113+
datatype_id = convert_to_id(datatype, "data_type")
114114
sql = "SELECT command_id FROM qiita.command WHERE name = %s"
115115
command_id = conn_handler.execute_fetchone(sql, (command, ))[0]
116116
opts_json = params_dict_to_json(options)
@@ -238,7 +238,7 @@ def create(cls, datatype, command, options, analysis,
238238
"analysis: %s" % (datatype, command, options, analysis.id))
239239

240240
# Get the datatype and command ids from the strings
241-
datatype_id = convert_to_id(datatype, "data_type", conn_handler)
241+
datatype_id = convert_to_id(datatype, "data_type")
242242
sql = "SELECT command_id FROM qiita.command WHERE name = %s"
243243
command_id = conn_handler.execute_fetchone(sql, (command, ))[0]
244244
opts_json = params_dict_to_json(options)
@@ -297,7 +297,7 @@ def options(self):
297297
"job_id = %s)".format(self._table))
298298
db_comm = conn_handler.execute_fetchone(sql, (self._id, ))
299299
out_opt = loads(db_comm[1])
300-
basedir = get_db_files_base_dir(conn_handler)
300+
basedir = get_db_files_base_dir()
301301
join_f = partial(join, join(basedir, "job"))
302302
for k in out_opt:
303303
opts[k] = join_f("%s_%s_%s" % (self._id, db_comm[0], k.strip("-")))
@@ -422,7 +422,7 @@ def add_results(self, results):
422422
conn_handler = SQLConnectionHandler()
423423
self._lock_job(conn_handler)
424424
# convert all file type text to file type ids
425-
res_ids = [(fp, convert_to_id(fptype, "filepath_type", conn_handler))
425+
res_ids = [(fp, convert_to_id(fptype, "filepath_type"))
426426
for fp, fptype in results]
427427
file_ids = insert_filepaths(res_ids, self._id, self._table,
428428
"filepath", conn_handler, move_files=False)
@@ -485,7 +485,7 @@ def get_commands_by_datatype(cls, datatypes=None):
485485
conn_handler = SQLConnectionHandler()
486486
# get the ids of the datatypes to get commands for
487487
if datatypes is not None:
488-
datatype_info = [(convert_to_id(dt, "data_type", conn_handler), dt)
488+
datatype_info = [(convert_to_id(dt, "data_type"), dt)
489489
for dt in datatypes]
490490
else:
491491
datatype_info = conn_handler.execute_fetchall(

0 commit comments

Comments
 (0)