Commit
remove legacy goal_run_with_grad_no_batch
lazyoracle committed Jul 1, 2021
1 parent eeea4ba commit 6ee8d75
Showing 1 changed file with 0 additions and 64 deletions.
c3/optimizers/c3.py: 0 additions & 64 deletions
@@ -444,67 +444,3 @@ def goal_run_with_grad(self, current_params):
        self.optim_status["time"] = time.asctime()
        self.evaluation += 1
        return goal, grad

    def goal_run_with_grad_no_batch(self, current_params):
        """
        Same as goal_run but with gradient. Very resource intensive. Unoptimized at the
        moment.
        """
        exp_values = []
        sim_values = []
        exp_stds = []
        exp_shots = []
        count = 0
        seqs_pp = self.seqs_per_point

        with tf.GradientTape() as t:
            t.watch(current_params)
            for target, data in self.learn_data.items():
                self.learn_from = data["seqs_grouped_by_param_set"]
                self.gateset_opt_map = data["opt_map"]
                indeces = self.select_from_data(self.batch_sizes[target])
                for ipar in indeces:
                    count += 1
                    data_set = self.learn_from[ipar]
                    m_vals = data_set["results"][:seqs_pp]
                    sim_vals = self._one_par_sim_vals(
                        current_params, data_set, ipar, target
                    )
                    sim_values.extend(sim_vals)
                    exp_values.extend(m_vals)

                    self._log_one_dataset(data_set, ipar, indeces, sim_vals, count)

            if target == "all":
                goal = neg_loglkh_multinom_norm(
                    exp_values,
                    tf.stack(sim_values),
                    tf.constant(exp_stds, dtype=tf.float64),
                    tf.constant(exp_shots, dtype=tf.float64),
                )
            else:
                goal = g_LL_prime(
                    exp_values,
                    tf.stack(sim_values),
                    tf.constant(exp_stds, dtype=tf.float64),
                    tf.constant(exp_shots, dtype=tf.float64),
                )
        grad = t.gradient(goal, current_params).numpy()
        goal = goal.numpy()

        with open(self.logdir + self.logname, "a") as logfile:
            logfile.write("\nFinished batch with ")
            logfile.write("{}: {}\n".format(self.fom.__name__, goal))
            for cb_fom in self.callback_foms:
                val = float(cb_fom(exp_values, sim_values, exp_stds, exp_shots).numpy())
                logfile.write("{}: {}\n".format(cb_fom.__name__, val))
            logfile.flush()

        self.optim_status["params"] = [
            par.numpy().tolist() for par in self.pmap.get_parameters()
        ]
        self.optim_status["goal"] = goal
        self.optim_status["gradient"] = list(grad.flatten())
        self.optim_status["time"] = time.asctime()
        self.evaluation += 1
        return goal, grad
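
Aside (not part of the commit): both the retained goal_run_with_grad and this removed method follow the same pattern, evaluating the figure of merit inside a tf.GradientTape context and then querying the tape for the gradient with respect to the optimizer parameters. A minimal, self-contained sketch of that pattern follows; the quadratic goal is a placeholder standing in for the likelihood-based figures of merit, not the library's own functions.

import tensorflow as tf

def goal_and_grad(current_params):
    """Return a scalar goal and its gradient w.r.t. current_params (sketch only)."""
    with tf.GradientTape() as t:
        # Plain tensors are not tracked automatically, hence the explicit watch,
        # mirroring t.watch(current_params) in the removed method.
        t.watch(current_params)
        goal = tf.reduce_sum(current_params ** 2)  # placeholder figure of merit
    grad = t.gradient(goal, current_params)
    return goal.numpy(), grad.numpy()

goal, grad = goal_and_grad(tf.constant([0.1, -0.3], dtype=tf.float64))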
