Merge pull request #872 from janezd/progress-bar-context
Progress bar context handler and decorator
kernc committed Dec 11, 2015
2 parents b033869 + 13a81f4 commit 90b25ab
Showing 7 changed files with 157 additions and 122 deletions.
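
For orientation, the hunks below all replace manual progressBarInit() / progressBarSet() / progressBarFinished() bookkeeping with the progressBar() context handler this PR introduces. A minimal usage sketch, assuming an OWWidget subclass; self.items, process_item and run_long_computation are placeholder names, not part of this PR:

def commit(self):
    # Counted form: one advance() per unit of work; the bar is
    # initialised on entry and finished automatically on exit.
    with self.progressBar(len(self.items)) as progress:
        for item in self.items:
            process_item(item)          # placeholder for real work
            progress.advance()

    # Bare form: the context only opens and closes the bar, while the
    # percentage is driven elsewhere, e.g. via self.progressBarSet
    # passed as a progress callback (see the heatmap hunks below).
    with self.progressBar():
        run_long_computation(progress_callback=self.progressBarSet)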
70 changes: 34 additions & 36 deletions Orange/widgets/evaluate/owtestlearners.py
@@ -372,42 +372,41 @@ def update_progress(finished):
preprocessor=self.preprocessor,
callback=update_progress)
self.setStatusMessage("Running")
self.progressBarInit()

try:
if self.resampling == OWTestLearners.KFold:
if len(self.data) < self.k_folds:
self.error(4, "Number of folds exceeds the data size")
return

warnings = []
results = Orange.evaluation.CrossValidation(
self.data, learners, k=self.k_folds, random_state=rstate,
warnings=warnings, **common_args)
if warnings:
self.warning(2, warnings[0])
elif self.resampling == OWTestLearners.LeaveOneOut:
results = Orange.evaluation.LeaveOneOut(
self.data, learners, **common_args)
elif self.resampling == OWTestLearners.ShuffleSplit:
train_size = self.sample_p / 100
results = Orange.evaluation.ShuffleSplit(
self.data, learners, n_resamples=self.n_repeat,
train_size=train_size, test_size=None,
random_state=rstate, **common_args)
elif self.resampling == OWTestLearners.TestOnTrain:
results = Orange.evaluation.TestOnTrainingData(
self.data, learners, **common_args)
elif self.resampling == OWTestLearners.TestOnTest:
results = Orange.evaluation.TestOnTestData(
self.data, self.test_data, learners, **common_args)
else:
assert False
except RuntimeError as e:
self.error(2, str(e))
self.setStatusMessage("")
self.progressBarFinished()
return
with self.progressBar():
try:
if self.resampling == OWTestLearners.KFold:
if len(self.data) < self.k_folds:
self.error(4, "Number of folds exceeds the data size")
return

warnings = []
results = Orange.evaluation.CrossValidation(
self.data, learners, k=self.k_folds,
random_state=rstate, warnings=warnings, **common_args)
if warnings:
self.warning(2, warnings[0])
elif self.resampling == OWTestLearners.LeaveOneOut:
results = Orange.evaluation.LeaveOneOut(
self.data, learners, **common_args)
elif self.resampling == OWTestLearners.ShuffleSplit:
train_size = self.sample_p / 100
results = Orange.evaluation.ShuffleSplit(
self.data, learners, n_resamples=self.n_repeat,
train_size=train_size, test_size=None,
random_state=rstate, **common_args)
elif self.resampling == OWTestLearners.TestOnTrain:
results = Orange.evaluation.TestOnTrainingData(
self.data, learners, **common_args)
elif self.resampling == OWTestLearners.TestOnTest:
results = Orange.evaluation.TestOnTestData(
self.data, self.test_data, learners, **common_args)
else:
assert False
except RuntimeError as e:
self.error(2, str(e))
self.setStatusMessage("")
return

learner_key = {slot.learner: key for key, slot in self.learners.items()}
for learner, result in zip(learners, split_by_model(results)):
@@ -431,7 +430,6 @@ def update_progress(finished):
self.learners[key]._replace(results=result, stats=stats)

self.setStatusMessage("")
self.progressBarFinished()

def _update_header(self):
# Set the correct horizontal header labels on the results_model.
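Note that the early return and the RuntimeError branch in the new code above no longer call progressBarFinished(): the context handler is expected to close the bar on any exit from the with block. Roughly, the with statement stands in for this sketch (an assumption about the handler's exit behaviour, not code from this PR):

self.progressBarInit()
try:
    run_selected_evaluation()    # placeholder for the resampling branches above
finally:
    # Runs on normal completion, on the early return, and on exceptions.
    self.progressBarFinished()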
4 changes: 3 additions & 1 deletion Orange/widgets/gui.py
@@ -3285,6 +3285,9 @@ def __init__(self, widget, iterations):
self.count = 0
self.widget.progressBarInit()

def __del__(self):
self.finish()

def advance(self, count=1):
self.count += count
self.widget.progressBarSet(int(self.count * 100 / max(1, self.iter)))
@@ -3293,7 +3296,6 @@ def finish(self):
self.widget.progressBarFinished()



##############################################################################

def tabWidget(widget):
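The gui.py hunk above shows only the added __del__; for context, here is a sketch of the whole ProgressBar helper consistent with the visible lines. The __enter__/__exit__ methods are an assumption about how the context handler is wired up, not copied from this PR:

class ProgressBar:
    def __init__(self, widget, iterations):
        self.iter = iterations
        self.widget = widget
        self.count = 0
        self.widget.progressBarInit()

    def __del__(self):
        # Added by this PR: close the bar even if finish() is never
        # called explicitly.
        self.finish()

    def advance(self, count=1):
        self.count += count
        self.widget.progressBarSet(int(self.count * 100 / max(1, self.iter)))

    def finish(self):
        self.widget.progressBarFinished()

    # Assumed context-manager protocol, so that
    # `with widget.progressBar(n) as progress:` works as in the other hunks.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.finish()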
13 changes: 5 additions & 8 deletions Orange/widgets/unsupervised/owkmeans.py
@@ -214,19 +214,16 @@ def run_optimization(self):
self.controlArea.setDisabled(True)
self.optimization_runs = []
if self.check_data_size(self.k_to):
self.progressBarInit()
progress_steps = self.k_to - self.k_from + 1
self.optimization_runs = []
kmeans = KMeans(
init=['random', 'k-means++'][self.smart_init],
n_init=self.n_init,
max_iter=self.max_iterations)
for k in range(self.k_from, self.k_to + 1):
self.progressBarSet(100.0 * (k - self.k_from) /
progress_steps)
kmeans.params["n_clusters"] = k
self.optimization_runs.append((k, kmeans(self.data)))
self.progressBarFinished()
with self.progressBar(self.k_to - self.k_from + 1) as progress:
for k in range(self.k_from, self.k_to + 1):
progress.advance()
kmeans.params["n_clusters"] = k
self.optimization_runs.append((k, kmeans(self.data)))
finally:
self.controlArea.setDisabled(False)
self.show_results()
94 changes: 53 additions & 41 deletions Orange/widgets/utils/scaling.py
@@ -486,54 +486,66 @@ def get_optimal_clusters(self, attribute_name_order, add_result_funct):
self.data_domain.class_var])

# init again, in case that the attribute ordering took too much time
self.scatterWidget.progressBarInit()
start_time = time.time()
count = len(attribute_name_order)*(len(attribute_name_order)-1)/2
test_index = 0

for i in range(len(attribute_name_order)):
for j in range(i):
try:
attr1 = self.attribute_name_index[attribute_name_order[j]]
attr2 = self.attribute_name_index[attribute_name_order[i]]
test_index += 1
if self.clusterOptimization.isOptimizationCanceled():
secs = time.time() - start_time
self.clusterOptimization.setStatusBarText("Evaluation stopped (evaluated %d projections in %d min, %d sec)"
% (test_index, secs/60, secs%60))
self.scatterWidget.progressBarFinished()
return

data = self.create_projection_as_example_table([attr1, attr2],
domain = domain,
jitter_size = jitter_size)
graph, valuedict, closuredict, polygon_vertices_dict, enlarged_closure_dict, other_dict = self.clusterOptimization.evaluateClusters(data)

all_value = 0.0
classes_dict = {}
for key in valuedict.keys():
add_result_funct(valuedict[key], closuredict[key],
polygon_vertices_dict[key],
[attribute_name_order[i],
attribute_name_order[j]],
int(graph.objects[polygon_vertices_dict[key][0]].getclass()),
enlarged_closure_dict[key], other_dict[key])
classes_dict[key] = int(graph.objects[polygon_vertices_dict[key][0]].getclass())
all_value += valuedict[key]
add_result_funct(all_value, closuredict, polygon_vertices_dict,
[attribute_name_order[i], attribute_name_order[j]],
classes_dict, enlarged_closure_dict, other_dict) # add all the clusters

self.clusterOptimization.setStatusBarText("Evaluated %d projections..."
% (test_index))
self.scatterWidget.progressBarSet(100.0*test_index/float(count))
del data, graph, valuedict, closuredict, polygon_vertices_dict, enlarged_closure_dict, other_dict, classes_dict
count = len(attribute_name_order) * (len(attribute_name_order) - 1) / 2
with self.scatterWidget.progressBar(count) as progressBar:
for i in range(len(attribute_name_order)):
for j in range(i):
try:
index = self.attribute_name_index
attr1 = index[attribute_name_order[j]]
attr2 = index[attribute_name_order[i]]
test_index += 1
if self.clusterOptimization.isOptimizationCanceled():
secs = time.time() - start_time
self.clusterOptimization.setStatusBarText(
"Evaluation stopped "
"(evaluated %d projections in %d min, %d sec)"
% (test_index, secs / 60, secs % 60))
return

data = self.create_projection_as_example_table(
[attr1, attr2],
domain=domain, jitter_size=jitter_size)
graph, valuedict, closuredict, polygon_vertices_dict, \
enlarged_closure_dict, other_dict = \
self.clusterOptimization.evaluateClusters(data)

all_value = 0.0
classes_dict = {}
for key in valuedict.keys():
cls = int(graph.objects[polygon_vertices_dict
[key][0]].getclass())
add_result_funct(
valuedict[key], closuredict[key],
polygon_vertices_dict[key],
[attribute_name_order[i],
attribute_name_order[j]],
cls,
enlarged_closure_dict[key], other_dict[key])
classes_dict[key] = cls
all_value += valuedict[key]
# add all the clusters
add_result_funct(
all_value, closuredict, polygon_vertices_dict,
[attribute_name_order[i], attribute_name_order[j]],
classes_dict, enlarged_closure_dict, other_dict)

self.clusterOptimization.setStatusBarText(
"Evaluated %d projections..." % test_index)
progressBar.advance()
del data, graph, valuedict, closuredict, \
polygon_vertices_dict, enlarged_closure_dict, \
other_dict, classes_dict
except:
type, val, traceback = sys.exc_info()
sys.excepthook(type, val, traceback) # print the exception

secs = time.time() - start_time
self.clusterOptimization.setStatusBarText("Finished evaluation (evaluated %d projections in %d min, %d sec)" % (test_index, secs/60, secs%60))
self.scatterWidget.progressBarFinished()
self.clusterOptimization.setStatusBarText(
"Finished evaluation (evaluated %d projections in %d min, %d sec)"
% (test_index, secs / 60, secs % 60))

getOptimalClusters = get_optimal_clusters
18 changes: 7 additions & 11 deletions Orange/widgets/visualize/owheatmap.py
@@ -779,10 +779,10 @@ def cluster_rows(self, data, parts, ordered=False):
cluster = hierarchical.dist_matrix_clustering(matrix)

if ordered and cluster_ord is None:
self.progressBarInit()
cluster_ord = hierarchical.optimal_leaf_ordering(
cluster, matrix, progress_callback=self.progressBarSet)
self.progressBarFinished()
with self.progressBar():
cluster_ord = hierarchical.optimal_leaf_ordering(
cluster, matrix,
progress_callback=self.progressBarSet)

row_groups.append(row._replace(cluster=cluster, cluster_ordered=cluster_ord))

@@ -811,10 +810,9 @@ def cluster_columns(self, data, parts, ordered=False):
if cluster is None:
cluster = hierarchical.dist_matrix_clustering(matrix)
if ordered and cluster_ord is None:
self.progressBarInit()
cluster_ord = hierarchical.optimal_leaf_ordering(
cluster, matrix, progress_callback=self.progressBarSet)
self.progressBarFinished()
with self.progressBar():
cluster_ord = hierarchical.optimal_leaf_ordering(
cluster, matrix, progress_callback=self.progressBarSet)

col_groups = [col._replace(cluster=cluster, cluster_ordered=cluster_ord)
for col in parts.columns]
@@ -832,8 +831,6 @@ def construct_heatmaps(self, data, split_label=None):
else:
group_var = None

self.progressBarInit()

group_label = split_label
if self.merge_kmeans:
if self.kmeans_model is None:
@@ -901,7 +898,6 @@ def construct_heatmaps(self, data, split_label=None):
self.__columns_cache[group_label] = parts

self.heatmapparts = parts
self.progressBarFinished()

def construct_heatmaps_scene(self, parts, data):
def select_row(item):
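The heatmap hunks use the bare form of the context handler: the with block only opens and closes the bar, and the long-running call moves it through progress_callback=self.progressBarSet. Judging by advance() in gui.py above, progressBarSet expects a value between 0 and 100, so the callback is presumably invoked with percentages; a condensed sketch of the pattern:

with self.progressBar():
    # optimal_leaf_ordering reports its progress (assumed 0-100) straight
    # into the widget's progress bar; no manual advance() calls needed.
    cluster_ord = hierarchical.optimal_leaf_ordering(
        cluster, matrix, progress_callback=self.progressBarSet)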
46 changes: 21 additions & 25 deletions Orange/widgets/visualize/owscattermap.py
@@ -809,26 +809,25 @@ def sharpen_root_region(self, region):
def bin_func(xbins, ybins):
return grid_bin(data, xvar, yvar, xbins, ybins, zvar)

self.progressBarInit()
last_node = root
update_time = time.time()
changed = False

for i, node in enumerate(
sharpen_region(self._root, region, nbins, bin_func)):
tick = time.time() - update_time
changed = changed or node is not last_node
if changed and ((i % nbins == 0) or tick > 2.0):
self.update_map(node)
last_node = node
changed = False
update_time = time.time()
self.progressBarSet(100 * i / (nbins ** 2))
with self.progressBar(nbins ** 2) as progress_bar:
for i, node in enumerate(
sharpen_region(self._root, region, nbins, bin_func)):
tick = time.time() - update_time
changed = changed or node is not last_node
if changed and ((i % nbins == 0) or tick > 2.0):
self.update_map(node)
last_node = node
changed = False
update_time = time.time()
progress_bar.advance()

self._root = last_node
self._cache[xvar, yvar, zvar] = self._root
self.update_map(self._root)
self.progressBarFinished()

def _sampling_width(self):
if self._item is None:
@@ -910,26 +909,23 @@ def update_rects(node):
scored_rects = sorted(scored_rects, reverse=True,
key=operator.itemgetter(0))
root = self._root
self.progressBarInit()
update_time = time.time()

for i, (_, rect) in enumerate(scored_rects):
root = sharpen_region_recur(
root, rect.intersect(region),
nbins, depth + 1, bin_func
)
tick = time.time() - update_time
if tick > 2.0:
self.update_map(root)
update_time = time.time()

self.progressBarSet(100 * i / len(scored_rects))
with self.progressBar(len(scored_rects)) as progress_bar:
for i, (_, rect) in enumerate(scored_rects):
root = sharpen_region_recur(
root, rect.intersect(region),
nbins, depth + 1, bin_func)
tick = time.time() - update_time
if tick > 2.0:
self.update_map(root)
update_time = time.time()
progress_bar.advance()

self._root = root

self._cache[xvar, yvar, zvar] = self._root
self.update_map(self._root)
self.progressBarFinished()

def select_nodes_to_sharpen(self, node, region, bw, depth):
"""