pair: class; QPanda::Variational::Optimizer

Optimizer — abstract base class for variational optimizers.
cpp
#include <Optimizer.h>

class Optimizer
{
public:
    // construction
    Optimizer(var lost_function, double learning_rate = 0.01);

    // methods
    virtual std::unordered_set<var> get_variables() = 0;

    virtual std::unordered_map<var, MatrixXd>
        compute_gradients(std::unordered_set<var>& var_set) = 0;

    virtual double get_loss() = 0;

    virtual bool run(std::unordered_set<var>& leaves, size_t t = 0) = 0;
};
// direct descendants
class AdaGradOptimizer;
class AdamOptimizer;
class MomentumOptimizer;
class RMSPropOptimizer;
class VanillaGradientDescentOptimizer;