Skip to content

Commit

Permalink
Fixed TDMS upload bug: write the dac-datapoints-meta array from meta_column_types/meta_columns instead of the unrelated alpha_column_types/alpha_columns, correct a copy-pasted comment ("dac-variables-meta"), and remove leftover debug logging from compute_alpha_clusters_PCA.
Browse files Browse the repository at this point in the history
  • Loading branch information
smartin71 committed Nov 8, 2023
1 parent 34da677 commit d2b0559
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 12 deletions.
9 changes: 0 additions & 9 deletions web-server/plugins/slycat-dac/py/dac_compute_coords.py
Original file line number Diff line number Diff line change
Expand Up @@ -517,8 +517,6 @@ def compute_alpha_clusters_PCA (var_dist, meta_columns, meta_column_types):
"""

cherrypy.log.error("ALPHA PCA")

# landmarks should always be None for this calculation

# get size of data
Expand All @@ -528,17 +526,12 @@ def compute_alpha_clusters_PCA (var_dist, meta_columns, meta_column_types):
# form a matrix using only first PCA components
X = np.asarray([list(var_dist[i][:,0]) for i in range(num_vars)]).transpose()

cherrypy.log.error(str(X[0,0]))

# for each quantitative meta variable, compute scaled property vector
num_meta_cols = len(meta_column_types)
cherrypy.log.error(str(num_meta_cols))
prop_vecs = []
for i in range(num_meta_cols):

# populate property vector data
cherrypy.log.error(str(meta_column_types[i]))

if meta_column_types[i] == "float64":

prop_vec = np.asarray(meta_columns[i])
Expand Down Expand Up @@ -571,8 +564,6 @@ def compute_alpha_clusters_PCA (var_dist, meta_columns, meta_column_types):
# save property vector
prop_vecs.append(prop_vec)

cherrypy.log.error(str(prop_vecs))

# compute NNLS cluster button alpha values, if more than one data point
alpha_cluster_mat = np.zeros((num_meta_cols, num_vars))
if num_tests > 1:
Expand Down
7 changes: 4 additions & 3 deletions web-server/plugins/slycat-dac/py/dac_upload_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,11 +121,12 @@ def init_upload_model (database, model, dac_error, parse_error_log, meta_column_

# start our single "dac-datapoints-meta" array.
dimensions = [dict(name="row", end=len(meta_rows))]
attributes = [dict(name=name, type=type) for name, type in zip(meta_column_str_names, alpha_column_types)]
attributes = [dict(name=name, type=type) for name, type in zip(meta_column_str_names, meta_column_types)]

slycat.web.server.put_model_array(database, model, "dac-datapoints-meta", 0, attributes, dimensions)

# upload data into the array
for index, data in enumerate(alpha_columns):
for index, data in enumerate(meta_columns):
slycat.web.server.put_model_arrayset_data(
database, model, "dac-datapoints-meta", "0/%s/..." % index, [data])

Expand All @@ -140,7 +141,7 @@ def init_upload_model (database, model, dac_error, parse_error_log, meta_column_
else:
meta_var_col_str_names.append(name.decode("utf-8"))

# start our single "dac-datapoints-meta" array.
# start our single "dac-variables-meta" array.
dimensions = [dict(name="row", end=len(meta_vars))]
attributes = [dict(name=name, type="string") for name in meta_var_col_str_names]
slycat.web.server.put_model_array(database, model, "dac-variables-meta", 0, attributes, dimensions)
Expand Down

0 comments on commit d2b0559

Please sign in to comment.