diff --git a/bin/simple.cfg b/bin/simple.cfg index 22775df..2b8905b 100644 --- a/bin/simple.cfg +++ b/bin/simple.cfg @@ -5,7 +5,6 @@ Duplex { name = "Monkey Saddle Point optimization"; - mode = "opt"; iterations = "1000"; output = working_directory + "simple.png"; diff --git a/doc/class-diagram.graffle b/doc/class-diagram.graffle new file mode 100644 index 0000000..87d89e8 Binary files /dev/null and b/doc/class-diagram.graffle differ diff --git a/src/duplex.cpp b/src/duplex.cpp index 8738117..0497589 100755 --- a/src/duplex.cpp +++ b/src/duplex.cpp @@ -1,459 +1,53 @@ -// -// duplex.cpp -// duplex -// -// Created by Adel Ahmadyan on 4/23/15. -// Copyright (c) 2015 Adel Ahmadyan. All rights reserved. -// - #include "duplex.h" -#include "system.h" -#include -#include -#include -#include -#include -#include -#include - -#include "optimizer.h" -#include "gradientDescent.h" -#include "adagrad.h" -#include "adam.h" -#include "adadelta.h" -#include "nadam.h" - -using namespace std; Duplex::Duplex(Settings* c){ - settings = c; - iterationCap = settings->lookupInt("iterations"); - parameterDimension = settings->lookupInt("parameter.size"); - objectiveDimension = settings->lookupInt("objective.size"); - - t0 = settings->lookupFloat("initial_temperature"); - temperature = t0; - initialStepLength = settings->lookupFloat("initial_step_length"); - reinforcementLearningOption = settings->check("reinforcement_learning", "enable"); - shrinkGoalRegionWithTemperateOption = settings->check("shrink_goal_region_with_temperate", "enable"); - if (reinforcementLearningOption){ - minAward = settings->lookupFloat("min_reward"); - maxAward = settings->lookupFloat("max_reward"); - delta = settings->lookupFloat("delta_reward"); - } - if (settings->check("temperature", "fast")){ - temperatureOption = Temperature::temperaturefast; - }else if (settings->check("temperature", "boltz")){ - temperatureOption = Temperature::temperatureboltz; - }else if (settings->check("temperature", "exp")){ - temperatureOption = Temperature::temperatureexp; - }else{ - cout << "Temperature option not found in the config file. Possible options are [fast, boltz, exp]."; - exit(2); - } - - if (settings->check("annealing", "fast")){ - annealingOption = Annealing::annealingfast; - }else if (settings->check("annealing", "boltz")){ - annealingOption = Annealing::annealingboltz; - }else if (settings->check("annealing", "fast-random")){ - annealingOption = Annealing::annealingfastrandom; - }else if (settings->check("annealing", "boltz-random")){ - annealingOption = Annealing::annealingboltzrandom; - }else{ - cout << "Annealing option not found in the config file/ Possible options are [fast, boltz]"; - exit(2); - } + settings = c; + iterationCap = settings->lookupInt("iterations"); + system = new System(settings); - db = new Search(settings, objectiveDimension); -} - -Duplex::~Duplex(){ -} - -double* Duplex::getInitialState(){ - double* init = new double[parameterDimension]; - bool initialStateAssignmentIsRandom = settings->check("initial_state_assignment", "random"); + parameterDimension = settings->lookupInt("parameter.size"); + objectiveDimension = settings->lookupInt("objective.size"); - vector parameters = settings->listVariables("parameter", "uid-parameter"); - for(int i=0;ilookupFloat(("parameter." + parameters[i] + ".range.min").c_str()); - double max = settings->lookupFloat(("parameter." + parameters[i] + ".range.max").c_str()); - init[i] = (max - min)*((1.0*rand()) / RAND_MAX) + min; - }else{ - init[i] = settings->lookupFloat(("parameter." 
+ parameters[i] + ".init").c_str()); - } + t0 = settings->lookupFloat("initial_temperature"); + temperature = t0; + initialStepLength = settings->lookupFloat("initial_step_length"); + reinforcementLearningOption = settings->check("reinforcement_learning", "enable"); + shrinkGoalRegionWithTemperateOption = settings->check("shrink_goal_region_with_temperate", "enable"); + if (reinforcementLearningOption){ + minAward = settings->lookupFloat("min_reward"); + maxAward = settings->lookupFloat("max_reward"); + delta = settings->lookupFloat("delta_reward"); } - return init; -} - -void Duplex::initialize(){ - cout << "Duplex initialization started. It make take a while to analyze the root." << endl; - double* init = getInitialState(); - double* reward = new double[parameterDimension]; - - for (int i = 0; i < parameterDimension; i++) - reward[i] = 1; - - //setting boundaries for the objectives - objectiveType = settings->listValues("objective", "uid-objective.goal.mode"); - vector objectiveGoalMinStringVector = settings->listValues("objective", "uid-objective.goal.min"); - vector objectiveGoalMaxStringVector = settings->listValues("objective", "uid-objective.goal.max"); - vector objectiveMinStringVector = settings->listValues("objective", "uid-objective.min"); - vector objectiveMaxStringVector = settings->listValues("objective", "uid-objective.max"); - vector objectiveOptimumStringVector = settings->listValues("objective", "uid-objective.goal.optimum"); - goalRegionBoxMin = new double[objectiveDimension]; - goalRegionBoxMax = new double[objectiveDimension]; - max = new double[objectiveDimension]; - min = new double[objectiveDimension]; - opt = new double[objectiveDimension]; - for (int i = 0; i parametersMinStringVector = settings->listValues("parameter", "uid-parameter.range.min"); - vector parametersMaxStringVector = settings->listValues("parameter", "uid-parameter.range.max"); - parameterMin = new double[parameterDimension]; - parameterMax = new double[parameterDimension]; - for (int i = 0; i < parameterDimension; i++){ - parameterMin[i] = stod(parametersMinStringVector[i]); - parameterMax[i] = stod(parametersMaxStringVector[i]); - } - - stat = new Stat(settings, max, min, opt); - cout << "Statistic class initialized." << endl; - - //Setting up the root node - root = new State(parameterDimension, objectiveDimension); - root->setParameter(init); - root->setReward(reward, (double)parameterDimension); - root->setID(0); - root->setParentID(-1); - system->eval(root); - double distance = score(root, max, min); - stat->updateError(distance); - db->insert(root); - cout << "Root node set." 
<< endl; - - if (settings->check("mode", "fopt")){ - //generates an initial curve, starting at y(0)=0 and ending in y(n)=b - double b = settings->lookupFloat("parameter.b"); - double c0 = settings->lookupFloat("parameter.c0"); - int pathSegments = settings->lookupInt("initialPathSegments"); - - //randomly generate each segment in the path - for (int i = 1; i < pathSegments-1; i++){ - State* q = new State(parameterDimension, objectiveDimension); - double* p = new double[parameterDimension]; - //p[0] = i; //time, ignore this, the states are no longer time-annotated in dido case - for (int j = 0; j < parameterDimension; j++){ - p[j] = q->unifRand(parameterMin[j], parameterMax[j]); - } - q->setParameter(p); - q->setID(i); - q->setParent(db->getState(i - 1)); - system->eval(q); - distance = score(q, max, min); - stat->updateError(distance); - db->insert(q); - } - - //connect the last point to b - State* last = new State(parameterDimension, objectiveDimension); - double* p = new double[parameterDimension]; - //p[0] = pathSegments; - p[0] = b; - p[1] = 0; - last->setParameter(p); - last->setID(pathSegments - 1); - last->setParent(db->getState(pathSegments - 2)); - system->eval(last); - distance = score(last, max, min); - stat->updateError(distance); - db->insert(last); - cout << "Initial curve is set." << endl; - } - - cout << "Error and distance set." << endl; -} - -void Duplex::setObjective(){ - goal = new State(parameterDimension, objectiveDimension); - vector objectives = settings->listVariables("objective", "uid-objective"); - for (int i = 0; i < objectiveDimension; i++){ - string val = settings->lookupString(("objective." + objectives[i] + ".goal.optimum").c_str()); - goal->setObjective(stod(val), i); - } -} - -void Duplex::setSystem(System* sys){ - system=sys; -} - -// There are three approaches for taking the global step -// 1. only generate samples in the goal region. This is very depth oriented and forces duplex to directly converge toward the goal region. -// however duplex can get stuck in local minimas along the path, which forces us to make the goal region bigger --> makes duplex behaves more like RRT -// in the extreme case where goal region is the entire state space, duplex is RRT. -// 2. Similar to (1), we start by covering a big region of the state space at the begining, but we shrink the goal region as the algorithm progresses. In duplex, the size of -// the goal region is directly related to the temperature. In the end, all our samples will be around the goal state. -// 3. fitness: this is usefull in cases where we don't have a clear definition of the goal region. In that case, instead of generating a goal sample and picking -// the closest node, we pick the best fitted node among the nodes in the random tree. In order to to that, we keep a priority-queue of the X numbers of best observed -// nodes so far and pick randomly among them. This is the same as survival of the fittest concept in genetic algorithms. However, toward the end of the algorithm, duplex -// becomes very depth-oriented. 
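The second strategy above is what the code applies when shrink_goal_region_with_temperate is enabled: the goal box half-width scales with the current temperature, so early samples roam widely and late samples concentrate around the optimum. Below is a minimal standalone sketch of that sampling rule; unifRand and the opt/temperature/box names are stand-ins for the State helpers and Duplex members used in globalStep(), not new API.

#include <cmath>
#include <cstdlib>

// Uniform sample in [lo, hi] (assumed helper, analogous to State::unifRand).
static double unifRand(double lo, double hi){
	return lo + (hi - lo) * (double)rand() / RAND_MAX;
}

// Draw one objective sample inside a goal box whose half-width shrinks with temperature.
static void sampleShrunkGoal(double* objective, const double* opt,
                             const double* boxMin, const double* boxMax,
                             int dim, double temperature){
	for (int i = 0; i < dim; i++){
		double halfWidth = temperature * std::fabs(boxMax[i] - boxMin[i]);
		objective[i] = unifRand(opt[i] - halfWidth, opt[i] + halfWidth);
	}
}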
-State* Duplex::globalStep(){ - State* qsample = new State(parameterDimension, objectiveDimension); - qsample->setParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); - if (shrinkGoalRegionWithTemperateOption){ - double* objective = new double[objectiveDimension]; - for (int i = 0; i < objectiveDimension; i++){ - delta = abs(goalRegionBoxMax[i] - goalRegionBoxMin[i]); - objective[i] = qsample->unifRand(opt[i] - temperature*delta, opt[i] + temperature*delta); - } - qsample->setObjective(objective); - }else{ - qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax)); - } - return qsample; -} - - -// Evaluates each states and assign them a score. -// States with lower score are better candidates, state with the minimum score (0) is the optimal solution. -double Duplex::score(State* state, double* maxBound, double* minBound){ - double distance = 0; - // In functional optimization, we have to evaluat each objective seperately. - if (settings->check("mode", "fopt")){ - double b = settings->lookupFloat("parameter.b"); - double c0 = settings->lookupFloat("parameter.c0"); - double* boundary = new double[2]; - boundary[0] = b; - boundary[1] = 0; - - double* objectives = state->getObjective(); - double* goals = goal->getObjective(); - int objectiveSize = state->getObjectiveSize(); - //measure objective - //currently we support the following types of objectives - //1. boundary: equivalent to the boundary conditions in BVPs. The closer we get to the boundary value, the better - //2. boundary-strict: similar to boundary, but if we are crossing the bounary value the sameples is not usefull anymore. - // for example, for the length of the curve we use bounadry-hard objectives, so the length is strictly less than boundary - //3. maximize - //4. minimize - for (int i = 0; i < objectiveSize; i++){ - auto potentialdistance = state->distance(2, state->getParameter(), boundary); - auto potentialsum = (state->getParameter()[1] * (b-state->getParameter()[0]) ) / 2.0 ; - double normalizedDistance = (objectives[i] - goals[i]) / (max[i] - min[i]); - normalizedDistance *= normalizedDistance; - if ((objectiveType[i] == "boundary-strict") - && (objectives[i]>opt[i]) //if the length of the curve is bigger than the strict boundary, discard this trace - && (objectives[i]+potentialdistance>opt[i]) //evaluating the potential of this sample: if we even take the direct route to the boundary - //condition and still the length is more than the bondary, discard this sample - ){ - normalizedDistance = std::numeric_limits::max(); - } - if ((objectiveType[i] == "maximize") - && (objectives[i]>max[i]) - && (objectives[i] + potentialsum > max[i])){ - normalizedDistance = 0; - } - distance += normalizedDistance; - } - delete[] boundary; //todo: remove this + if (settings->check("temperature", "fast")){ + temperatureOption = Temperature::temperaturefast; + }else if (settings->check("temperature", "boltz")){ + temperatureOption = Temperature::temperatureboltz; + }else if (settings->check("temperature", "exp")){ + temperatureOption = Temperature::temperatureexp; }else{ - //Most of the time, we can use Eucledean distance as the score. Closer to the goal, the better - distance = goal->distance(state, maxBound, minBound); + cout << "Temperature option not found in the config file. 
Possible options are [fast, boltz, exp]."; + exit(2); } - state->setScore(distance); - return distance; -} - -//slightly changes the input -double* Duplex::generateNewInput(State* q){ - int pSize = q->getParameterSize(); - double* param = q->getParameter(); - double* input = new double[pSize]; - for (int i = 0; i < pSize; i++){ - input[i] = param[i] + computeStepLength(); - if (input[i] < parameterMin[i]) input[i] = parameterMin[i]; - if (input[i] > parameterMax[i]) input[i] = parameterMax[i]; - } - - //input[nextCandidateParameter] += stepLength; - //if (input[nextCandidateParameter] < parameterMin[nextCandidateParameter]) input[nextCandidateParameter] = parameterMin[nextCandidateParameter]; - //if (input[nextCandidateParameter] > parameterMax[nextCandidateParameter]) input[nextCandidateParameter] = parameterMax[nextCandidateParameter]; - return input; -} - -int Duplex::computeNextCandidateParameter(State* qnear){ - if (reinforcementLearningOption){ - double random = qnear->getRewardCDF() * ((double)rand() / (RAND_MAX)); - double* reward = qnear->getRewardVector(); - for (int i = 0; i < parameterDimension; i++){ - random -= reward[i]; - if (random <= 0) return i; - } - return parameterDimension - 1; - }else{ - nextCandidateParameter = rand() % qnear->getParameterSize(); - return nextCandidateParameter; - } -} - -void Duplex::computeTemperature(int i){ - switch (temperatureOption){ - case Temperature::temperaturefast: - temperature = t0 * (1- (1.0*i) / iterationCap ); - break; - case Temperature::temperatureboltz: - temperature = t0 / log(i+2); - break; - case Temperature::temperatureexp: - temperature = temperature*0.95; - break; - } - if (temperature < 0.05) temperature = 0.05; -} - -double Duplex::computeStepLength(){ - double maxStep = initialStepLength * rand() / double(RAND_MAX); - int direction = 1; if (rand() % 2 == 0) direction = -1; - switch (annealingOption){ - case Annealing::annealingfast: - stepLength = direction * initialStepLength * temperature; - break; - case Annealing::annealingboltz: - stepLength = direction * initialStepLength * sqrt(temperature); - break; - case Annealing::annealingboltzrandom: - stepLength = direction * maxStep * sqrt(temperature); - break; - case Annealing::annealingfastrandom: - stepLength = direction * maxStep * temperature; - break; - } - return stepLength; - -} - -State* Duplex::localStep(int i, State* qnear){ - computeTemperature(i); - nextCandidateParameter = computeNextCandidateParameter(qnear); - double* input = generateNewInput(qnear); - State* qnew = new State(parameterDimension, objectiveDimension); - qnew->setParameter(input); - qnew->setParent(qnear); - stat->sensitivity->pushBackInputChange(nextCandidateParameter, qnew->getParameter()[nextCandidateParameter], stepLength); - return qnew; -} - - - -//Optimizer types: -// Duplex internally supports three types of optimizer -// 1. Tree optimizer, which is based on random tree optimization algorithm -// 2. Walk optimizer, a generic term for descent algorithms, such as gradient descent, Adam, RMSProp, etc. -// 3. Heuristic methods, currently supporting only simulated annealing - -//todo: needs refactoring, should be moved to its own class -// Generic walk-based optimizer methods, such as gradient descent, Adap, RMPProp, etc. 
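walkOptimizer() below only relies on the Optimizer interface exposing update(State*), so the descent methods are interchangeable. As an illustration of what a single walk update amounts to, here is a plain gradient-descent step on a raw parameter vector using a central finite-difference gradient; the learning rate lr and probe step h are assumed tuning constants, and this is a sketch, not the repository's GradientDescent implementation.

#include <functional>
#include <vector>

// One gradient-descent step: estimate the gradient of `cost` by central
// differences and move against it.
std::vector<double> walkStep(const std::vector<double>& x,
                             const std::function<double(const std::vector<double>&)>& cost,
                             double lr = 0.01, double h = 1e-6){
	std::vector<double> next(x);
	for (size_t i = 0; i < x.size(); ++i){
		std::vector<double> xp(x), xm(x);
		xp[i] += h;
		xm[i] -= h;
		double grad = (cost(xp) - cost(xm)) / (2 * h);	// central difference along dimension i
		next[i] = x[i] - lr * grad;						// descend along the gradient
	}
	return next;
}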
-void Duplex::walkOptimizer(){ - auto epsilon = settings->lookupFloat("optimization.epsilon"); - Optimizer* optimizer; - if(settings->check("optimization.algorithm", "gd")){ - optimizer = new GradientDescent(settings); - }else if(settings->check("optimization.algorithm", "adagrad")){ - optimizer = new Adagrad(settings); - }else if(settings->check("optimization.algorithm", "nadam")){ - optimizer = new Nadam(settings); - }else if(settings->check("optimization.algorithm", "adam") || settings->check("optimization.algorithm", "adamax")){ - optimizer = new Adam(settings); - }else if(settings->check("optimization.algorithm", "adadelta")){ - optimizer = new Adadelta(settings); + if (settings->check("annealing", "fast")){ + annealingOption = Annealing::annealingfast; + }else if (settings->check("annealing", "boltz")){ + annealingOption = Annealing::annealingboltz; + }else if (settings->check("annealing", "fast-random")){ + annealingOption = Annealing::annealingfastrandom; + }else if (settings->check("annealing", "boltz-random")){ + annealingOption = Annealing::annealingboltzrandom; }else{ - optimizer = new GradientDescent(settings); - } - - auto iterations=1; - while(stat->getDeltaConvergence()>epsilon && (iterationsgetState(); - State* next = optimizer->update(prev); - system->eval(next,0); - insert(iterations, prev, next); - stat->updateConvergence(next); - iterations++; - } - cout << "Optimizer finished, #iteration= " << iterations << endl ; - -} - -void Duplex::treeOptimizer(){ - for (int i = 1; inearestNode(qsample); //Find closest node to the objective - State* qnew = localStep(i, qnear); - system->eval(qnew, 0); //simulate the circuit with the new input - insert(i, qnear, qnew); + cout << "Annealing option not found in the config file/ Possible options are [fast, boltz]"; + exit(2); } - if (settings->check("sensitivity-analysis.enable", "true")) - stat->sensitivity->generateSensitivityMatrix(); -} - - -void Duplex::optimize(){ - //treeOptimizer(); - walkOptimizer(); + db = new Search(settings, objectiveDimension); } -void Duplex::simulated_annealing(){ - State* bestState = root; - auto bestEnergy = goal->distance(root, max, min); - for (int i = 1; isetParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); - system->eval(qsample, 0); - auto r = (1.0*rand()) / RAND_MAX; - auto energy = goal->distance(qsample, max, min); - auto t = exp((bestEnergy - energy) / (20 * i)); - cout << "random=" << r << " temperature=" << t << " energy=" << energy << " best" << bestEnergy << endl; - if ((energy < bestEnergy) || (tupdateError(score(bestState, max, min)); - } +Duplex::~Duplex(){ } -//skratchpad, should be merged with globalStep function -State* Duplex::foptGlobalStep(){ - State* qsample = new State(parameterDimension, objectiveDimension); - qsample->setParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); - qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax)); - //for (int i = 0; i < objectiveDimension; i++){ - // cout << qsample->getObjective()[i] << " ,"; - //}cout << endl; - if (shrinkGoalRegionWithTemperateOption){ - double* objective = new double[objectiveDimension]; - for (int i = 0; i < objectiveDimension; i++){ - delta = abs(goalRegionBoxMax[i] - goalRegionBoxMin[i]); - objective[i] = qsample->unifRand(opt[i] - temperature*delta, opt[i] + temperature*delta); - } - qsample->setObjective(objective); - } - else{ - 
qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax));
-	}
-	return qsample;
-}
 
 void Duplex::insert(int i, State* qnear, State* qnew){
 	qnew->setID(i);
@@ -461,21 +55,22 @@ void Duplex::insert(int i, State* qnear, State* qnew){
 	db->insert(qnew);	//add a new node to the database
 }
 
-/// functional optimization algorithm using Duplex
-/// the big difference between fopt and duplex is that in fopt, we are optimizing for an entire path, not just for one state
-void Duplex::functionalOptimization(){
-	for (int i = db->getSize(); i < iterationCap; i++){
-		cout << i << endl;
-		State* qsample = foptGlobalStep();			//generate a new bias sample
-		State* qnear = db->nearestNode(qsample);	//Find closest node to the objective
-		State* qnew = localStep(i, qnear);
-		system->eval(qnew, 0);						//simulate the circuit with the new input
-		//bias.push_back(qsample);
-		//updateError(score(qnew, max, min));
-		//updateReward(qnear, qnew);
-		//updateSensitivity(qnear, qnew);
-		insert(i, qnew, qnear);
-	}
+void Duplex::optimize(){
+	cout << "Executing duplex optimization ..." << endl;
+	auto root = initialize();
+	insert(0, root, root);
+	int iteration = 0;
+	while (isConverged(iteration++)){
+		auto qprev = globalStep();
+		auto qnew = localStep(iteration, qprev);
+		auto cost = evaluate(qnew);
+		cout << cost << endl;
+		insert(iteration, qprev, qnew);
+	}
+}
+
+void Duplex::train(){
+	optimize();
 }
 
 Search* Duplex::getDB(){
@@ -486,7 +81,6 @@ System* Duplex::getSystem(){
 	return system;
 }
 
-
 void Duplex::setStat(Stat* s){
 	stat = s;
 }
@@ -499,11 +93,11 @@ void Duplex::save(boost::property_tree::ptree* ptree){
 	ptree->add("duplex.version", 1);
 	boost::property_tree::ptree& data = ptree->add("duplex.data", "");
 	db->save(&data);
-	boost::property_tree::ptree& sense = ptree->add("duplex.sensitivity", "");
-	stat->sensitivity->save(&sense);
+	boost::property_tree::ptree& sense = ptree->add("duplex.sensitivity", "");
+	stat->sensitivity->save(&sense);
 	boost::property_tree::ptree& node = ptree->add("duplex.stat", "");
-
-	for (int i = 0; i < stat->getSize(); i++){
+
+	for (int i = 0; i < stat->getSize(); i++){
 		boost::property_tree::ptree& iter = node.add("iteration", "");
 		iter.add("id", i);
 		iter.add("error", stat->getError(i));
diff --git a/src/duplex.h b/src/duplex.h
index 7332b8c..dddb582 100755
--- a/src/duplex.h
+++ b/src/duplex.h
@@ -1,11 +1,5 @@
-//
-//  duplex.h
-//  duplex
-//
-//  Created by Adel Ahmadyan on 4/23/15.
-//  Copyright (c) 2015 Adel Ahmadyan. All rights reserved.
-//
 #pragma once
+#include
 
 #include "configuration.h"
@@ -14,82 +8,72 @@
 #include "stat.h"
 #include "data.h"
 
+using namespace std;
+
 enum class Temperature { temperatureexp, temperaturefast, temperatureboltz };
 enum class Annealing { annealingfast, annealingboltz, annealingfastrandom, annealingboltzrandom};
 
 class Duplex{
-	Settings* settings;
+
+protected:
+	Settings* settings;
 	Search* db;
-	Stat* stat;
+	Stat* stat;
+	System* system;
 
-	//System:
-	State* root;
-	State* goal;
-	System* system;
-	double* max;
-	double* min;
-	double* opt;
-	int parameterDimension;
-	int objectiveDimension;
-
-	//Duplex options
-	int iterationCap;
-	Temperature temperatureOption;
-	Annealing annealingOption;
-	double t0;				//initial temperature
-	double temperature;		//current temperature
-	double stepLength;
-	double initialStepLength;
-	bool reinforcementLearningOption;
-	bool shrinkGoalRegionWithTemperateOption;
-	int nextCandidateParameter;
-
-	//Duplex outputs:
-	double minAward;
-	double maxAward;
-	double delta;
+	//System:
+	State* goal;
+	double* max;
+	double* min;
+	double* opt;
+	int parameterDimension;
+	int objectiveDimension;
+
+	//Duplex options
+	int iterationCap;
+	Temperature temperatureOption;
+	Annealing annealingOption;
+	double t0;				//initial temperature
+	double temperature;		//current temperature
+	double stepLength;
+	double initialStepLength;
+	bool reinforcementLearningOption;
+	bool shrinkGoalRegionWithTemperateOption;
+	int nextCandidateParameter;
+
+	//Duplex outputs:
+	double minAward;
+	double maxAward;
+	double delta;
 	double* goalRegionBoxMin;
 	double* goalRegionBoxMax;
-	double* parameterMin;
-	double* parameterMax;
-	vector<string> objectiveType;
-
+	double* parameterMin;
+	double* parameterMax;
+	vector<string> objectiveType;
 public:
-	Duplex(Settings*);
+	Duplex(Settings* s);
 	~Duplex();
-	double* getInitialState();
-	void setSystem(System*);
-	void initialize();
-	void setObjective();
-
-	void walkOptimizer();
-	void optimize();
-	void simulated_annealing();
-	void functionalOptimization();
-	void treeOptimizer();
-	//plotting methods
-	string draw(int);
-	string drawParameterTree(int x, int y, int z, string sizingPreference, string plotType, string title);
-	string drawObjectiveTree(int, int, string);
-	string drawTrace(int x, int y, string title);
-	string drawCanvas(string function, double xmin, double xmax, double ymin, double ymax, string, string);
+	//initialization methods
+
+	// methods handling input data
+	// methods for optimizing the function
+	virtual State* initialize()=0;
 	void insert(int i, State* qnear, State* qnew);
-	double score(State*, double*, double*);
-	double* generateNewInput(State* q);
-	State* localStep(int i, State*);
-	State* globalStep();
-	State* foptGlobalStep();
+	virtual State* globalStep()=0;
+	virtual State* localStep(int, State*)=0;
+	virtual double evaluate(State*)=0;
+	virtual bool isConverged(int)=0;
+	void optimize();
+	void train();
 
-	void computeTemperature(int i);
-	double computeStepLength();
-	int computeNextCandidateParameter(State* qnear);
-	Search* getDB();
-	System* getSystem();
+	// aux methods, loading, saving, etc.
void save(boost::property_tree::ptree* ptree); void load(boost::property_tree::ptree* ptree); + Search* getDB(); + System* getSystem(); void setStat(Stat*); Stat* getStat(); -}; \ No newline at end of file +}; diff --git a/src/duplex_old.cpp b/src/duplex_old.cpp new file mode 100755 index 0000000..8738117 --- /dev/null +++ b/src/duplex_old.cpp @@ -0,0 +1,526 @@ +// +// duplex.cpp +// duplex +// +// Created by Adel Ahmadyan on 4/23/15. +// Copyright (c) 2015 Adel Ahmadyan. All rights reserved. +// + +#include "duplex.h" +#include "system.h" +#include +#include +#include +#include +#include +#include +#include + +#include "optimizer.h" +#include "gradientDescent.h" +#include "adagrad.h" +#include "adam.h" +#include "adadelta.h" +#include "nadam.h" + +using namespace std; + +Duplex::Duplex(Settings* c){ + settings = c; + iterationCap = settings->lookupInt("iterations"); + parameterDimension = settings->lookupInt("parameter.size"); + objectiveDimension = settings->lookupInt("objective.size"); + + t0 = settings->lookupFloat("initial_temperature"); + temperature = t0; + initialStepLength = settings->lookupFloat("initial_step_length"); + reinforcementLearningOption = settings->check("reinforcement_learning", "enable"); + shrinkGoalRegionWithTemperateOption = settings->check("shrink_goal_region_with_temperate", "enable"); + if (reinforcementLearningOption){ + minAward = settings->lookupFloat("min_reward"); + maxAward = settings->lookupFloat("max_reward"); + delta = settings->lookupFloat("delta_reward"); + } + if (settings->check("temperature", "fast")){ + temperatureOption = Temperature::temperaturefast; + }else if (settings->check("temperature", "boltz")){ + temperatureOption = Temperature::temperatureboltz; + }else if (settings->check("temperature", "exp")){ + temperatureOption = Temperature::temperatureexp; + }else{ + cout << "Temperature option not found in the config file. Possible options are [fast, boltz, exp]."; + exit(2); + } + + if (settings->check("annealing", "fast")){ + annealingOption = Annealing::annealingfast; + }else if (settings->check("annealing", "boltz")){ + annealingOption = Annealing::annealingboltz; + }else if (settings->check("annealing", "fast-random")){ + annealingOption = Annealing::annealingfastrandom; + }else if (settings->check("annealing", "boltz-random")){ + annealingOption = Annealing::annealingboltzrandom; + }else{ + cout << "Annealing option not found in the config file/ Possible options are [fast, boltz]"; + exit(2); + } + + db = new Search(settings, objectiveDimension); +} + +Duplex::~Duplex(){ +} + +double* Duplex::getInitialState(){ + double* init = new double[parameterDimension]; + bool initialStateAssignmentIsRandom = settings->check("initial_state_assignment", "random"); + + vector parameters = settings->listVariables("parameter", "uid-parameter"); + for(int i=0;ilookupFloat(("parameter." + parameters[i] + ".range.min").c_str()); + double max = settings->lookupFloat(("parameter." + parameters[i] + ".range.max").c_str()); + init[i] = (max - min)*((1.0*rand()) / RAND_MAX) + min; + }else{ + init[i] = settings->lookupFloat(("parameter." + parameters[i] + ".init").c_str()); + } + } + return init; +} + +void Duplex::initialize(){ + cout << "Duplex initialization started. It make take a while to analyze the root." 
<< endl; + double* init = getInitialState(); + double* reward = new double[parameterDimension]; + + for (int i = 0; i < parameterDimension; i++) + reward[i] = 1; + + //setting boundaries for the objectives + objectiveType = settings->listValues("objective", "uid-objective.goal.mode"); + vector objectiveGoalMinStringVector = settings->listValues("objective", "uid-objective.goal.min"); + vector objectiveGoalMaxStringVector = settings->listValues("objective", "uid-objective.goal.max"); + vector objectiveMinStringVector = settings->listValues("objective", "uid-objective.min"); + vector objectiveMaxStringVector = settings->listValues("objective", "uid-objective.max"); + vector objectiveOptimumStringVector = settings->listValues("objective", "uid-objective.goal.optimum"); + goalRegionBoxMin = new double[objectiveDimension]; + goalRegionBoxMax = new double[objectiveDimension]; + max = new double[objectiveDimension]; + min = new double[objectiveDimension]; + opt = new double[objectiveDimension]; + for (int i = 0; i parametersMinStringVector = settings->listValues("parameter", "uid-parameter.range.min"); + vector parametersMaxStringVector = settings->listValues("parameter", "uid-parameter.range.max"); + parameterMin = new double[parameterDimension]; + parameterMax = new double[parameterDimension]; + for (int i = 0; i < parameterDimension; i++){ + parameterMin[i] = stod(parametersMinStringVector[i]); + parameterMax[i] = stod(parametersMaxStringVector[i]); + } + + stat = new Stat(settings, max, min, opt); + cout << "Statistic class initialized." << endl; + + //Setting up the root node + root = new State(parameterDimension, objectiveDimension); + root->setParameter(init); + root->setReward(reward, (double)parameterDimension); + root->setID(0); + root->setParentID(-1); + system->eval(root); + double distance = score(root, max, min); + stat->updateError(distance); + db->insert(root); + cout << "Root node set." << endl; + + if (settings->check("mode", "fopt")){ + //generates an initial curve, starting at y(0)=0 and ending in y(n)=b + double b = settings->lookupFloat("parameter.b"); + double c0 = settings->lookupFloat("parameter.c0"); + int pathSegments = settings->lookupInt("initialPathSegments"); + + //randomly generate each segment in the path + for (int i = 1; i < pathSegments-1; i++){ + State* q = new State(parameterDimension, objectiveDimension); + double* p = new double[parameterDimension]; + //p[0] = i; //time, ignore this, the states are no longer time-annotated in dido case + for (int j = 0; j < parameterDimension; j++){ + p[j] = q->unifRand(parameterMin[j], parameterMax[j]); + } + q->setParameter(p); + q->setID(i); + q->setParent(db->getState(i - 1)); + system->eval(q); + distance = score(q, max, min); + stat->updateError(distance); + db->insert(q); + } + + //connect the last point to b + State* last = new State(parameterDimension, objectiveDimension); + double* p = new double[parameterDimension]; + //p[0] = pathSegments; + p[0] = b; + p[1] = 0; + last->setParameter(p); + last->setID(pathSegments - 1); + last->setParent(db->getState(pathSegments - 2)); + system->eval(last); + distance = score(last, max, min); + stat->updateError(distance); + db->insert(last); + cout << "Initial curve is set." << endl; + } + + cout << "Error and distance set." 
<< endl; +} + +void Duplex::setObjective(){ + goal = new State(parameterDimension, objectiveDimension); + vector objectives = settings->listVariables("objective", "uid-objective"); + for (int i = 0; i < objectiveDimension; i++){ + string val = settings->lookupString(("objective." + objectives[i] + ".goal.optimum").c_str()); + goal->setObjective(stod(val), i); + } +} + +void Duplex::setSystem(System* sys){ + system=sys; +} + +// There are three approaches for taking the global step +// 1. only generate samples in the goal region. This is very depth oriented and forces duplex to directly converge toward the goal region. +// however duplex can get stuck in local minimas along the path, which forces us to make the goal region bigger --> makes duplex behaves more like RRT +// in the extreme case where goal region is the entire state space, duplex is RRT. +// 2. Similar to (1), we start by covering a big region of the state space at the begining, but we shrink the goal region as the algorithm progresses. In duplex, the size of +// the goal region is directly related to the temperature. In the end, all our samples will be around the goal state. +// 3. fitness: this is usefull in cases where we don't have a clear definition of the goal region. In that case, instead of generating a goal sample and picking +// the closest node, we pick the best fitted node among the nodes in the random tree. In order to to that, we keep a priority-queue of the X numbers of best observed +// nodes so far and pick randomly among them. This is the same as survival of the fittest concept in genetic algorithms. However, toward the end of the algorithm, duplex +// becomes very depth-oriented. +State* Duplex::globalStep(){ + State* qsample = new State(parameterDimension, objectiveDimension); + qsample->setParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); + if (shrinkGoalRegionWithTemperateOption){ + double* objective = new double[objectiveDimension]; + for (int i = 0; i < objectiveDimension; i++){ + delta = abs(goalRegionBoxMax[i] - goalRegionBoxMin[i]); + objective[i] = qsample->unifRand(opt[i] - temperature*delta, opt[i] + temperature*delta); + } + qsample->setObjective(objective); + }else{ + qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax)); + } + return qsample; +} + + +// Evaluates each states and assign them a score. +// States with lower score are better candidates, state with the minimum score (0) is the optimal solution. +double Duplex::score(State* state, double* maxBound, double* minBound){ + double distance = 0; + // In functional optimization, we have to evaluat each objective seperately. + if (settings->check("mode", "fopt")){ + double b = settings->lookupFloat("parameter.b"); + double c0 = settings->lookupFloat("parameter.c0"); + double* boundary = new double[2]; + boundary[0] = b; + boundary[1] = 0; + + double* objectives = state->getObjective(); + double* goals = goal->getObjective(); + int objectiveSize = state->getObjectiveSize(); + //measure objective + //currently we support the following types of objectives + //1. boundary: equivalent to the boundary conditions in BVPs. The closer we get to the boundary value, the better + //2. boundary-strict: similar to boundary, but if we are crossing the bounary value the sameples is not usefull anymore. + // for example, for the length of the curve we use bounadry-hard objectives, so the length is strictly less than boundary + //3. maximize + //4. 
minimize + for (int i = 0; i < objectiveSize; i++){ + auto potentialdistance = state->distance(2, state->getParameter(), boundary); + auto potentialsum = (state->getParameter()[1] * (b-state->getParameter()[0]) ) / 2.0 ; + double normalizedDistance = (objectives[i] - goals[i]) / (max[i] - min[i]); + normalizedDistance *= normalizedDistance; + if ((objectiveType[i] == "boundary-strict") + && (objectives[i]>opt[i]) //if the length of the curve is bigger than the strict boundary, discard this trace + && (objectives[i]+potentialdistance>opt[i]) //evaluating the potential of this sample: if we even take the direct route to the boundary + //condition and still the length is more than the bondary, discard this sample + ){ + normalizedDistance = std::numeric_limits::max(); + } + if ((objectiveType[i] == "maximize") + && (objectives[i]>max[i]) + && (objectives[i] + potentialsum > max[i])){ + normalizedDistance = 0; + } + distance += normalizedDistance; + } + delete[] boundary; //todo: remove this + }else{ + //Most of the time, we can use Eucledean distance as the score. Closer to the goal, the better + distance = goal->distance(state, maxBound, minBound); + } + + state->setScore(distance); + return distance; +} + +//slightly changes the input +double* Duplex::generateNewInput(State* q){ + int pSize = q->getParameterSize(); + double* param = q->getParameter(); + double* input = new double[pSize]; + for (int i = 0; i < pSize; i++){ + input[i] = param[i] + computeStepLength(); + if (input[i] < parameterMin[i]) input[i] = parameterMin[i]; + if (input[i] > parameterMax[i]) input[i] = parameterMax[i]; + } + + //input[nextCandidateParameter] += stepLength; + //if (input[nextCandidateParameter] < parameterMin[nextCandidateParameter]) input[nextCandidateParameter] = parameterMin[nextCandidateParameter]; + //if (input[nextCandidateParameter] > parameterMax[nextCandidateParameter]) input[nextCandidateParameter] = parameterMax[nextCandidateParameter]; + return input; +} + +int Duplex::computeNextCandidateParameter(State* qnear){ + if (reinforcementLearningOption){ + double random = qnear->getRewardCDF() * ((double)rand() / (RAND_MAX)); + double* reward = qnear->getRewardVector(); + for (int i = 0; i < parameterDimension; i++){ + random -= reward[i]; + if (random <= 0) return i; + } + return parameterDimension - 1; + }else{ + nextCandidateParameter = rand() % qnear->getParameterSize(); + return nextCandidateParameter; + } +} + +void Duplex::computeTemperature(int i){ + switch (temperatureOption){ + case Temperature::temperaturefast: + temperature = t0 * (1- (1.0*i) / iterationCap ); + break; + case Temperature::temperatureboltz: + temperature = t0 / log(i+2); + break; + case Temperature::temperatureexp: + temperature = temperature*0.95; + break; + } + if (temperature < 0.05) temperature = 0.05; +} + +double Duplex::computeStepLength(){ + double maxStep = initialStepLength * rand() / double(RAND_MAX); + int direction = 1; if (rand() % 2 == 0) direction = -1; + switch (annealingOption){ + case Annealing::annealingfast: + stepLength = direction * initialStepLength * temperature; + break; + case Annealing::annealingboltz: + stepLength = direction * initialStepLength * sqrt(temperature); + break; + case Annealing::annealingboltzrandom: + stepLength = direction * maxStep * sqrt(temperature); + break; + case Annealing::annealingfastrandom: + stepLength = direction * maxStep * temperature; + break; + } + return stepLength; + +} + +State* Duplex::localStep(int i, State* qnear){ + computeTemperature(i); + 
nextCandidateParameter = computeNextCandidateParameter(qnear); + double* input = generateNewInput(qnear); + State* qnew = new State(parameterDimension, objectiveDimension); + qnew->setParameter(input); + qnew->setParent(qnear); + stat->sensitivity->pushBackInputChange(nextCandidateParameter, qnew->getParameter()[nextCandidateParameter], stepLength); + return qnew; +} + + + +//Optimizer types: +// Duplex internally supports three types of optimizer +// 1. Tree optimizer, which is based on random tree optimization algorithm +// 2. Walk optimizer, a generic term for descent algorithms, such as gradient descent, Adam, RMSProp, etc. +// 3. Heuristic methods, currently supporting only simulated annealing + +//todo: needs refactoring, should be moved to its own class +// Generic walk-based optimizer methods, such as gradient descent, Adap, RMPProp, etc. +void Duplex::walkOptimizer(){ + auto epsilon = settings->lookupFloat("optimization.epsilon"); + Optimizer* optimizer; + if(settings->check("optimization.algorithm", "gd")){ + optimizer = new GradientDescent(settings); + }else if(settings->check("optimization.algorithm", "adagrad")){ + optimizer = new Adagrad(settings); + }else if(settings->check("optimization.algorithm", "nadam")){ + optimizer = new Nadam(settings); + }else if(settings->check("optimization.algorithm", "adam") || settings->check("optimization.algorithm", "adamax")){ + optimizer = new Adam(settings); + }else if(settings->check("optimization.algorithm", "adadelta")){ + optimizer = new Adadelta(settings); + }else{ + optimizer = new GradientDescent(settings); + } + + auto iterations=1; + while(stat->getDeltaConvergence()>epsilon && (iterationsgetState(); + State* next = optimizer->update(prev); + system->eval(next,0); + insert(iterations, prev, next); + stat->updateConvergence(next); + iterations++; + } + cout << "Optimizer finished, #iteration= " << iterations << endl ; + +} + +void Duplex::treeOptimizer(){ + for (int i = 1; inearestNode(qsample); //Find closest node to the objective + State* qnew = localStep(i, qnear); + system->eval(qnew, 0); //simulate the circuit with the new input + insert(i, qnear, qnew); + } + + if (settings->check("sensitivity-analysis.enable", "true")) + stat->sensitivity->generateSensitivityMatrix(); +} + + +void Duplex::optimize(){ + //treeOptimizer(); + walkOptimizer(); +} + +void Duplex::simulated_annealing(){ + State* bestState = root; + auto bestEnergy = goal->distance(root, max, min); + for (int i = 1; isetParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); + system->eval(qsample, 0); + auto r = (1.0*rand()) / RAND_MAX; + auto energy = goal->distance(qsample, max, min); + auto t = exp((bestEnergy - energy) / (20 * i)); + cout << "random=" << r << " temperature=" << t << " energy=" << energy << " best" << bestEnergy << endl; + if ((energy < bestEnergy) || (tupdateError(score(bestState, max, min)); + } +} + +//skratchpad, should be merged with globalStep function +State* Duplex::foptGlobalStep(){ + State* qsample = new State(parameterDimension, objectiveDimension); + qsample->setParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); + qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax)); + //for (int i = 0; i < objectiveDimension; i++){ + // cout << qsample->getObjective()[i] << " ,"; + //}cout << endl; + if (shrinkGoalRegionWithTemperateOption){ + double* objective = new double[objectiveDimension]; + for (int i = 
0; i < objectiveDimension; i++){ + delta = abs(goalRegionBoxMax[i] - goalRegionBoxMin[i]); + objective[i] = qsample->unifRand(opt[i] - temperature*delta, opt[i] + temperature*delta); + } + qsample->setObjective(objective); + } + else{ + qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax)); + } + return qsample; +} + +void Duplex::insert(int i, State* qnear, State* qnew){ + qnew->setID(i); + qnew->setParent(qnear); + db->insert(qnew); //add a new node to the database +} + +/// functional optimization algorithm using Duplex +/// the big difference between fopt and duplex is that in fopt, we are optimizing for an entire path, not just for one state +void Duplex::functionalOptimization(){ + for (int i = db->getSize(); i < iterationCap; i++){ + cout << i << endl; + State* qsample = foptGlobalStep(); //generate a new bias sample + State* qnear = db->nearestNode(qsample); //Find closest node to the objective + State* qnew = localStep(i, qnear); + system->eval(qnew, 0); //simulate the circuit with the new input + //bias.push_back(qsample); + //updateError(score(qnew, max, min)); + //updateReward(qnear, qnew); + //updateSensitivity(qnear, qnew); + insert(i, qnew, qnear); + } +} + +Search* Duplex::getDB(){ + return db; +} + +System* Duplex::getSystem(){ + return system; +} + + +void Duplex::setStat(Stat* s){ + stat = s; +} + +Stat* Duplex::getStat(){ + return stat; +} + +void Duplex::save(boost::property_tree::ptree* ptree){ + ptree->add("duplex.version", 1); + boost::property_tree::ptree& data = ptree->add("duplex.data", ""); + db->save(&data); + boost::property_tree::ptree& sense = ptree->add("duplex.sensitivity", ""); + stat->sensitivity->save(&sense); + boost::property_tree::ptree& node = ptree->add("duplex.stat", ""); + + for (int i = 0; igetSize(); i++){ + boost::property_tree::ptree& iter = node.add("iteration", ""); + iter.add("id", i); + iter.add("error", stat->getError(i)); + iter.add("distance", stat->getDistance(i)); + iter.put(".id", i); + } +} + +void Duplex::load(boost::property_tree::ptree* ptree){ + boost::property_tree::ptree& data = ptree->get_child("duplex.data"); + db->load(&data); + auto pnodes = ptree->get_child("duplex.stat"); + for(auto v: pnodes){ + if(v.first=="iteration"){ + cout << v.second.get("id") << endl; + stat->error_push_back(v.second.get("error")); + stat->distance_push_back(v.second.get("distance")); + } + } +} diff --git a/src/duplex_old.h b/src/duplex_old.h new file mode 100755 index 0000000..7332b8c --- /dev/null +++ b/src/duplex_old.h @@ -0,0 +1,95 @@ +// +// duplex.h +// duplex +// +// Created by Adel Ahmadyan on 4/23/15. +// Copyright (c) 2015 Adel Ahmadyan. All rights reserved. 
+// +#pragma once +#include + +#include "configuration.h" +#include "system.h" +#include "search.h" +#include "stat.h" +#include "data.h" + +enum class Temperature { temperatureexp, temperaturefast, temperatureboltz }; +enum class Annealing { annealingfast, annealingboltz, annealingfastrandom, annealingboltzrandom}; + +class Duplex{ + Settings* settings; + Search* db; + Stat* stat; + + //System: + State* root; + State* goal; + System* system; + double* max; + double* min; + double* opt; + int parameterDimension; + int objectiveDimension; + + //Duplex options + int iterationCap; + Temperature temperatureOption; + Annealing annealingOption; + double t0; //initial temperature + double temperature; //current temperature + double stepLength; + double initialStepLength; + bool reinforcementLearningOption; + bool shrinkGoalRegionWithTemperateOption; + int nextCandidateParameter; + + //Duplex outputs: + + double minAward; + double maxAward; + double delta; + double* goalRegionBoxMin; + double* goalRegionBoxMax; + double* parameterMin; + double* parameterMax; + vector objectiveType; + +public: + Duplex(Settings*); + ~Duplex(); + double* getInitialState(); + void setSystem(System*); + void initialize(); + void setObjective(); + + void walkOptimizer(); + void optimize(); + void simulated_annealing(); + void functionalOptimization(); + void treeOptimizer(); + + //plotting methods + string draw(int); + string drawParameterTree(int x, int y, int z, string sizingPreference, string plotType, string title); + string drawObjectiveTree(int, int, string); + string drawTrace(int x, int y, string title); + string drawCanvas(string function, double xmin, double xmax, double ymin, double ymax, string, string); + + void insert(int i, State* qnear, State* qnew); + double score(State*, double*, double*); + double* generateNewInput(State* q); + State* localStep(int i, State*); + State* globalStep(); + State* foptGlobalStep(); + + void computeTemperature(int i); + double computeStepLength(); + int computeNextCandidateParameter(State* qnear); + Search* getDB(); + System* getSystem(); + void save(boost::property_tree::ptree* ptree); + void load(boost::property_tree::ptree* ptree); + void setStat(Stat*); + Stat* getStat(); +}; \ No newline at end of file diff --git a/src/functionalOptimizer.cpp b/src/functionalOptimizer.cpp new file mode 100755 index 0000000..fb2f8d9 --- /dev/null +++ b/src/functionalOptimizer.cpp @@ -0,0 +1,7 @@ +#include "functionalOptimizer.h" + +FunctionalOptimizer::FunctionalOptimizer(Settings* s){ +} + +FunctionalOptimizer::~FunctionalOptimizer(){ +} \ No newline at end of file diff --git a/src/functionalOptimizer.h b/src/functionalOptimizer.h new file mode 100755 index 0000000..5ff62ea --- /dev/null +++ b/src/functionalOptimizer.h @@ -0,0 +1,11 @@ +#pragma once +#include +#include "duplex.h" + +using namespace std; + +class FunctionalOptimizer{ +public: + FunctionalOptimizer(Settings* s); + ~FunctionalOptimizer(); +}; diff --git a/src/main.cpp b/src/main.cpp index acbdddd..7433f28 100755 --- a/src/main.cpp +++ b/src/main.cpp @@ -18,6 +18,8 @@ #include #include +#include "nonconvexoptimizer.h" + using namespace config4cpp; #if (_MSC_VER == 1900) //Visual studio 2015 @@ -97,16 +99,13 @@ int main(int argc, char** argv){ log << "Parsing config file complete." << endl ; // Duplex main code - System* system = new System(settings); - Duplex* duplex = new Duplex(settings); log << "Duplex core created." << endl ; + Duplex* duplex = new NonconvexOptimizer(settings); log << "Duplex core created." 
<< endl ; Clustering* clustering; boost::property_tree::ptree ptree; //used to load/save the data //determining which algorithm to execute auto m = getMode(settings); - duplex->setSystem(system); log << "System set." ; - duplex->setObjective(); log << "Objective set. " ; duplex->initialize(); log << "Duplex initialization complete." << endl ; log.tick(); Stat* stat = duplex->getStat(); @@ -119,29 +118,29 @@ int main(int argc, char** argv){ break; // ----------------------------------------------------- case mode::duplex: - duplex->optimize(); + duplex->train(); break; // ----------------------------------------------------- case mode::fopt: - duplex->functionalOptimization(); + //duplex->functionalOptimization(); break; // ----------------------------------------------------- case mode::opt: - duplex->optimize(); + duplex->train(); break; // ----------------------------------------------------- case mode::sa: - duplex->simulated_annealing(); + //duplex->simulated_annealing(); break; // ----------------------------------------------------- case mode::clustering: clustering = new Clustering(settings); - clustering->train(settings->lookupString("clustering.mode")); + delete clustering; break; // ----------------------------------------------------- @@ -176,7 +175,6 @@ int main(int argc, char** argv){ //clean-up delete duplex; - delete system; delete settings; }catch (SettingsException se){ std::cerr << "CONFIG ERROR: " << se.what() << std::endl << std::endl; diff --git a/src/nonconvexOptimizer.cpp b/src/nonconvexOptimizer.cpp new file mode 100755 index 0000000..a61c6a2 --- /dev/null +++ b/src/nonconvexOptimizer.cpp @@ -0,0 +1,239 @@ +#include "nonconvexOptimizer.h" +#include + +NonconvexOptimizer::NonconvexOptimizer(Settings* s):Duplex(s){ +} + +NonconvexOptimizer::~NonconvexOptimizer(){ +} + + +double* NonconvexOptimizer::getInitialState(){ + double* init = new double[parameterDimension]; + bool initialStateAssignmentIsRandom = settings->check("initial_state_assignment", "random"); + + vector parameters = settings->listVariables("parameter", "uid-parameter"); + for(int i=0;ilookupFloat(("parameter." + parameters[i] + ".range.min").c_str()); + double max = settings->lookupFloat(("parameter." + parameters[i] + ".range.max").c_str()); + init[i] = (max - min)*((1.0*rand()) / RAND_MAX) + min; + }else{ + init[i] = settings->lookupFloat(("parameter." + parameters[i] + ".init").c_str()); + } + } + return init; +} + + +void NonconvexOptimizer::setObjective(){ + goal = new State(parameterDimension, objectiveDimension); + vector objectives = settings->listVariables("objective", "uid-objective"); + for (int i = 0; i < objectiveDimension; i++){ + string val = settings->lookupString(("objective." + objectives[i] + ".goal.optimum").c_str()); + goal->setObjective(stod(val), i); + } +} + +State* NonconvexOptimizer::initialize(){ + setObjective(); + cout << "Duplex initialization started. It make take a while to analyze the root." 
<< endl; + double* init = getInitialState(); + double* reward = new double[parameterDimension]; + + for (int i = 0; i < parameterDimension; i++) + reward[i] = 1; + + //setting boundaries for the objectives + objectiveType = settings->listValues("objective", "uid-objective.goal.mode"); + vector objectiveGoalMinStringVector = settings->listValues("objective", "uid-objective.goal.min"); + vector objectiveGoalMaxStringVector = settings->listValues("objective", "uid-objective.goal.max"); + vector objectiveMinStringVector = settings->listValues("objective", "uid-objective.min"); + vector objectiveMaxStringVector = settings->listValues("objective", "uid-objective.max"); + vector objectiveOptimumStringVector = settings->listValues("objective", "uid-objective.goal.optimum"); + goalRegionBoxMin = new double[objectiveDimension]; + goalRegionBoxMax = new double[objectiveDimension]; + max = new double[objectiveDimension]; + min = new double[objectiveDimension]; + opt = new double[objectiveDimension]; + for (int i = 0; i parametersMinStringVector = settings->listValues("parameter", "uid-parameter.range.min"); + vector parametersMaxStringVector = settings->listValues("parameter", "uid-parameter.range.max"); + parameterMin = new double[parameterDimension]; + parameterMax = new double[parameterDimension]; + for (int i = 0; i < parameterDimension; i++){ + parameterMin[i] = stod(parametersMinStringVector[i]); + parameterMax[i] = stod(parametersMaxStringVector[i]); + } + + stat = new Stat(settings, max, min, opt); + cout << "Statistic class initialized." << endl; + + //Setting up the root node + State* root = new State(parameterDimension, objectiveDimension); + root->setParameter(init); + root->setReward(reward, (double)parameterDimension); + root->setID(0); + root->setParentID(-1); + system->eval(root); + double distance = score(root, max, min); + stat->updateError(distance); + //db->insert(root); + cout << "Root node set." 
<< endl; + return root; +} + +void NonconvexOptimizer::insert(State* s){ + db->insert(s); +} + +State* NonconvexOptimizer::globalStep(){ + State* qsample = new State(parameterDimension, objectiveDimension); + qsample->setParameter(qsample->uniformRandomVector(parameterDimension, goalRegionBoxMin, goalRegionBoxMax)); + if (shrinkGoalRegionWithTemperateOption){ + double* objective = new double[objectiveDimension]; + for (int i = 0; i < objectiveDimension; i++){ + delta = abs(goalRegionBoxMax[i] - goalRegionBoxMin[i]); + objective[i] = qsample->unifRand(opt[i] - temperature*delta, opt[i] + temperature*delta); + } + qsample->setObjective(objective); + }else{ + qsample->setObjective(qsample->uniformRandomVector(objectiveDimension, goalRegionBoxMin, goalRegionBoxMax)); + } + return db->nearestNode(qsample); +} + +int NonconvexOptimizer::computeNextCandidateParameter(State* qnear){ + nextCandidateParameter = rand() % qnear->getParameterSize(); + return nextCandidateParameter; +} + +void NonconvexOptimizer::computeTemperature(int i){ + switch (temperatureOption){ + case Temperature::temperaturefast: + temperature = t0 * (1- (1.0*i) / iterationCap ); + break; + case Temperature::temperatureboltz: + temperature = t0 / log(i+2); + break; + case Temperature::temperatureexp: + temperature = temperature*0.95; + break; + } + if (temperature < 0.05) temperature = 0.05; +} + +double NonconvexOptimizer::computeStepLength(){ + double maxStep = initialStepLength * rand() / double(RAND_MAX); + int direction = 1; if (rand() % 2 == 0) direction = -1; + switch (annealingOption){ + case Annealing::annealingfast: + stepLength = direction * initialStepLength * temperature; + break; + case Annealing::annealingboltz: + stepLength = direction * initialStepLength * sqrt(temperature); + break; + case Annealing::annealingboltzrandom: + stepLength = direction * maxStep * sqrt(temperature); + break; + case Annealing::annealingfastrandom: + stepLength = direction * maxStep * temperature; + break; + } + return stepLength; + +} + +double* NonconvexOptimizer::generateNewInput(State* q){ + int pSize = q->getParameterSize(); + double* param = q->getParameter(); + double* input = new double[pSize]; + for (int i = 0; i < pSize; i++){ + input[i] = param[i] + computeStepLength(); + if (input[i] < parameterMin[i]) input[i] = parameterMin[i]; + if (input[i] > parameterMax[i]) input[i] = parameterMax[i]; + } + //input[nextCandidateParameter] += stepLength; + //if (input[nextCandidateParameter] < parameterMin[nextCandidateParameter]) input[nextCandidateParameter] = parameterMin[nextCandidateParameter]; + //if (input[nextCandidateParameter] > parameterMax[nextCandidateParameter]) input[nextCandidateParameter] = parameterMax[nextCandidateParameter]; + return input; +} + +State* NonconvexOptimizer::localStep(int i, State* qnear){ + computeTemperature(i); + //auto nextCandidateParameter = computeNextCandidateParameter(qnear); + double* input = generateNewInput(qnear); + State* qnew = new State(parameterDimension, objectiveDimension); + qnew->setParameter(input); + qnew->setParent(qnear); + return qnew; +} + +double NonconvexOptimizer::evaluate(State* qnew){ + system->eval(qnew, 0); //simulate the circuit with the + score(qnew, max, min); + return qnew->getScore(); +} + +// Evaluates each states and assign them a score. +// States with lower score are better candidates, state with the minimum score (0) is the optimal solution. 
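For the common (non-fopt) path, the score reduces to a range-normalized distance between the state's objective vector and the goal. A simplified sketch of that computation on raw arrays follows; the real work is delegated to goal->distance(state, maxBound, minBound), whose exact normalization lives in the State class, so treat this as an assumption about its behavior rather than a copy of it.

#include <cmath>

// Range-normalized Euclidean distance between an objective vector and the goal;
// 0 means the goal is hit exactly, larger is worse.
double normalizedScore(const double* objective, const double* goalObjective,
                       const double* maxBound, const double* minBound, int dim){
	double sum = 0;
	for (int i = 0; i < dim; i++){
		double d = (objective[i] - goalObjective[i]) / (maxBound[i] - minBound[i]);	// scale each axis by its range
		sum += d * d;
	}
	return std::sqrt(sum);
}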
+double NonconvexOptimizer::score(State* state, double* maxBound, double* minBound){ + double distance = 0; + // In functional optimization, we have to evaluat each objective seperately. + if (settings->check("mode", "fopt")){ + double b = settings->lookupFloat("parameter.b"); + double c0 = settings->lookupFloat("parameter.c0"); + double* boundary = new double[2]; + boundary[0] = b; + boundary[1] = 0; + + double* objectives = state->getObjective(); + double* goals = goal->getObjective(); + int objectiveSize = state->getObjectiveSize(); + //measure objective + //currently we support the following types of objectives + //1. boundary: equivalent to the boundary conditions in BVPs. The closer we get to the boundary value, the better + //2. boundary-strict: similar to boundary, but if we are crossing the bounary value the sameples is not usefull anymore. + // for example, for the length of the curve we use bounadry-hard objectives, so the length is strictly less than boundary + //3. maximize + //4. minimize + for (int i = 0; i < objectiveSize; i++){ + auto potentialdistance = state->distance(2, state->getParameter(), boundary); + auto potentialsum = (state->getParameter()[1] * (b-state->getParameter()[0]) ) / 2.0 ; + double normalizedDistance = (objectives[i] - goals[i]) / (max[i] - min[i]); + normalizedDistance *= normalizedDistance; + if ((objectiveType[i] == "boundary-strict") + && (objectives[i]>opt[i]) //if the length of the curve is bigger than the strict boundary, discard this trace + && (objectives[i]+potentialdistance>opt[i]) //evaluating the potential of this sample: if we even take the direct route to the boundary + //condition and still the length is more than the bondary, discard this sample + ){ + normalizedDistance = std::numeric_limits::max(); + } + if ((objectiveType[i] == "maximize") + && (objectives[i]>max[i]) + && (objectives[i] + potentialsum > max[i])){ + normalizedDistance = 0; + } + distance += normalizedDistance; + } + delete[] boundary; //todo: remove this + }else{ + //Most of the time, we can use Eucledean distance as the score. Closer to the goal, the better + distance = goal->distance(state, maxBound, minBound); + } + + state->setScore(distance); + return distance; +} + +bool NonconvexOptimizer::isConverged(int iteration){ + return iteration +#include "duplex.h" + +using namespace std; + +class NonconvexOptimizer: public Duplex{ +public: + NonconvexOptimizer(Settings* s); + ~NonconvexOptimizer(); + + double score(State* state, double* maxBound, double* minBound); + double* getInitialState(); + void setObjective(); + + State* initialize(); + void insert(State* s); + State* globalStep(); + State* localStep(int, State*); + double evaluate(State*); + bool isConverged(int); + + void computeTemperature(int i); + int computeNextCandidateParameter(State* qnear); + double* generateNewInput(State* q); + double computeStepLength(); +}; diff --git a/src/systemOptimizer.cpp b/src/systemOptimizer.cpp new file mode 100755 index 0000000..4c95f5e --- /dev/null +++ b/src/systemOptimizer.cpp @@ -0,0 +1,7 @@ +#include "systemOptimizer.h" + +SystemOptimizer::SystemOptimizer(Settings* s){ +} + +SystemOptimizer::~SystemOptimizer(){ +} \ No newline at end of file diff --git a/src/systemOptimizer.h b/src/systemOptimizer.h new file mode 100755 index 0000000..3bc80a8 --- /dev/null +++ b/src/systemOptimizer.h @@ -0,0 +1,11 @@ +#pragma once +#include +#include "duplex.h" + +using namespace std; + +class SystemOptimizer{ +public: + SystemOptimizer(Settings* s); + ~SystemOptimizer(); +};
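With this refactor, Duplex is an abstract base class: optimize() is the generic sample/step/evaluate loop, and concrete optimizers only override the pure-virtual hooks, which is how main.cpp now drives a NonconvexOptimizer through train(). The skeleton below shows the override points; MyOptimizer is a hypothetical example rather than a file in this change, and the hook bodies are placeholders to be replaced by a real strategy.

#include "duplex.h"

// Hypothetical subclass skeleton: each hook body is a placeholder.
class MyOptimizer : public Duplex {
public:
	MyOptimizer(Settings* s) : Duplex(s) {}
	State* initialize() override { return new State(parameterDimension, objectiveDimension); }	// placeholder root; a real optimizer would also evaluate and score it
	State* globalStep() override { return db->getState(0); }			// placeholder: always expand from the first stored node
	State* localStep(int i, State* qnear) override { return qnear; }	// placeholder: a real optimizer would perturb qnear into a new candidate
	double evaluate(State* q) override { system->eval(q, 0); return 0.0; }	// simulate; scoring omitted in this sketch
	bool isConverged(int iteration) override { return iteration < iterationCap; }	// keep iterating while under the iteration cap
};

// Typical driver, mirroring main.cpp:
//   Duplex* duplex = new MyOptimizer(settings);
//   duplex->train();	// train() delegates to the generic optimize() loop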