Commit efec2e55 authored by Dmitry Bagaev

Bayesian optimization added

parent d576a2fe
@@ -171,7 +171,7 @@ int main(int argc, char **argv) {
if (rank == 0) std::cout << "Solving with " << solverName << std::endl;
TTSP::OptimizationParameter tau("tau", std::make_pair(-4, -1), 0.1, -3, TTSP::OptimizationParameterType::PARAMETER_TYPE_EXPONENT);
TTSP::OptimizationParameter tau("tau", std::make_pair(-3, -1.0), 0.05, -2, TTSP::OptimizationParameterType::PARAMETER_TYPE_EXPONENT);
//TTSP::OptimizationParameter q("q", {0, 1, 2, 3, 4}, 2);
//TTSP::OptimizationParameter eps("eps", {1e-7, 1e-6, 1e-5, 1e-4, 1e-3}, 1e-5);
TTSP::OptimizationParameterEntries entries;
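A small standalone illustration of what the new tau definition could mean, under the assumption (not confirmed by the TTSP sources in this diff) that PARAMETER_TYPE_EXPONENT makes the optimizer search over the base-10 exponent while the solver receives pow(10, x): the raw range [-3, -1] with step 0.05 and default -2 would then correspond to tau values between 1e-3 and 1e-1 with default 1e-2.
#include <cmath>
#include <cstdio>
int main() {
    // Assumption: an exponent-type parameter stores x and the solver uses pow(10, x).
    const double min_exp = -3.0, max_exp = -1.0, step = 0.05, default_exp = -2.0;
    std::printf("default tau = %g\n", std::pow(10.0, default_exp)); // 0.01 under this assumption
    for (double e = min_exp; e <= max_exp + 1e-9; e += step)
        std::printf("candidate tau = %g\n", std::pow(10.0, e));     // 1e-3 ... 1e-1
    return 0;
}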
@@ -202,13 +202,15 @@ int main(int argc, char **argv) {
std::exit(0);
}
TTSP::Optimizers::SaveOptimizerOrReplace("test", optimizerType, parameters, properties, 50);
TTSP::Optimizers::SaveOptimizerOrReplace("test", optimizerType, parameters, properties, 15);
TTSP::OptimizerInterface *topt = TTSP::Optimizers::GetOptimizer(optimizerType, parameters, properties, 50);
TTSP::OptimizerInterface *topt = TTSP::Optimizers::GetSavedOptimizer("test");
topt->SetVerbosityLevel(TTSP::OptimizerVerbosityLevel::Level3);
topt->SetRestartStrategy(TTSP::OptimizerRestartStrategy::RESTART_STRATEGY_WITH_BEST, 10);
double metrics_total = 0.0;
while (!series.end()) {
std::pair<const char *, const char *> next = series.next();
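The fragment below restates the setup introduced in this hunk as a commented sketch. It uses only calls visible in the diff (SaveOptimizerOrReplace, GetSavedOptimizer, SetVerbosityLevel, SetRestartStrategy, SaveResult); the interpretation of the numeric arguments is an assumption, and the call that produces `suggestion` is not shown here, so it is left as a placeholder comment.
// Register (or replace) an optimizer under the name "test"; the trailing 15 is
// assumed to be the size of the stored results buffer.
TTSP::Optimizers::SaveOptimizerOrReplace("test", optimizerType, parameters, properties, 15);
// Fetch the same named instance later instead of constructing a new optimizer.
TTSP::OptimizerInterface *topt = TTSP::Optimizers::GetSavedOptimizer("test");
topt->SetVerbosityLevel(TTSP::OptimizerVerbosityLevel::Level3);
// Assumed meaning: restart from the best known configuration after 10 suggestions
// without improvement.
topt->SetRestartStrategy(TTSP::OptimizerRestartStrategy::RESTART_STRATEGY_WITH_BEST, 10);
// ... obtain `suggestion` from the optimizer (call not shown in this hunk),
//     run the solve, measure `metrics`, set `is_good`, then report back:
// topt->SaveResult(suggestion, metrics, is_good);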
@@ -270,9 +272,11 @@ int main(int argc, char **argv) {
bool is_good = result.first;
double metrics = result.second;
metrics_total += metrics;
optimizer->SaveResult(suggestion, metrics, is_good);
TTSP::OptimizerVerbosityLevel verbosity = TTSP::OptimizerVerbosityLevel::Level1;
TTSP::OptimizerVerbosityLevel verbosity = TTSP::OptimizerVerbosityLevel::Level3;
// On Level1 print some metadata information about solution and used parameters
if (rank == 0 && verbosity > TTSP::OptimizerVerbosityLevel::Level0) {
@@ -323,6 +327,8 @@ int main(int argc, char **argv) {
INMOST::MPIBarrier();
}
std::cout << "Metrics total from " << series.size() << " iterations: " << metrics_total << " (mean = " << metrics_total / series.size() << ")" << std::endl;
}
Solver::Finalize(); // Finalize solver and close MPI activity
......
@@ -73,3 +73,7 @@ std::pair<const char *, const char *> MatrixSeries::next() {
current += 1;
return std::make_pair(matrix, rhs);
}
int MatrixSeries::size() const {
return matrices.size();
}
@@ -24,6 +24,8 @@ public:
void restart();
int size() const;
std::pair<const char *, const char *> next();
};
......
@@ -14,12 +14,16 @@
#include <Eigen/Core>
#include <limbo/kernel/exp.hpp>
#include <limbo/kernel/squared_exp_ard.hpp>
#include <limbo/mean/data.hpp>
#include <limbo/mean/function_ard.hpp>
#include <limbo/model/gp.hpp>
#include <limbo/model/gp/kernel_lf_opt.hpp>
#include <limbo/tools.hpp>
#include <limbo/tools/macros.hpp>
#include <limbo/bayes_opt/bo_base.hpp>
#include <limbo/bayes_opt/boptimizer.hpp>
#include <limbo/model/gp/kernel_mean_lf_opt.hpp>
#include <limbo/acqui/gp_ucb.hpp>
#include <limbo/acqui/ei.hpp>
namespace TTSP {
@@ -36,12 +40,6 @@ namespace TTSP {
}
struct Params {
struct kernel_exp {
BO_PARAM(double, sigma_sq, 1.0);
BO_PARAM(double, l, 0.2);
};
struct kernel : public limbo::defaults::kernel {
};
struct kernel_squared_exp_ard : public limbo::defaults::kernel_squared_exp_ard {
@@ -52,11 +50,13 @@
};
struct acqui_ucb : public limbo::defaults::acqui_ucb {
};
struct acqui_ei : public limbo::defaults::acqui_ei {
};
};
using Kernel2_t = limbo::kernel::SquaredExpARD<Params>;
using Mean_t = limbo::mean::Data<Params>;
using GP2_t = limbo::model::GP<Params, Kernel2_t, Mean_t, limbo::model::gp::KernelLFOpt<Params>>;
using Mean_t = limbo::mean::FunctionARD<Params, limbo::mean::Data<Params>>;
using GP2_t = limbo::model::GP<Params, Kernel2_t, Mean_t, limbo::model::gp::KernelMeanLFOpt<Params>>;
GP2_t gp_ard;
// do not forget to call the optimization!
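For context, here is a self-contained sketch of the same limbo model stack selected above (SquaredExpARD kernel, a Data mean wrapped in FunctionARD, hyperparameters tuned by KernelMeanLFOpt), fitted to made-up 1-D data. The Params defaults follow limbo's GP tutorial rather than this repository's settings.
#include <tuple>
#include <vector>
#include <Eigen/Core>
#include <limbo/kernel/squared_exp_ard.hpp>
#include <limbo/mean/data.hpp>
#include <limbo/mean/function_ard.hpp>
#include <limbo/model/gp.hpp>
#include <limbo/model/gp/kernel_mean_lf_opt.hpp>
#include <limbo/tools.hpp>
struct Params {
    struct kernel : public limbo::defaults::kernel {};
    struct kernel_squared_exp_ard : public limbo::defaults::kernel_squared_exp_ard {};
    struct opt_rprop : public limbo::defaults::opt_rprop {};
    struct opt_parallelrepeater : public limbo::defaults::opt_parallelrepeater {};
};
using Kernel_t = limbo::kernel::SquaredExpARD<Params>;
using Mean_t   = limbo::mean::FunctionARD<Params, limbo::mean::Data<Params>>;
using GP_t     = limbo::model::GP<Params, Kernel_t, Mean_t, limbo::model::gp::KernelMeanLFOpt<Params>>;
int main() {
    // Toy 1-D data: observe y = -(x - 0.3)^2 at a few points in [0, 1].
    std::vector<Eigen::VectorXd> samples, observations;
    for (double x : {0.1, 0.4, 0.7, 0.9}) {
        samples.push_back(limbo::tools::make_vector(x));
        observations.push_back(limbo::tools::make_vector(-(x - 0.3) * (x - 0.3)));
    }
    GP_t gp(1, 1);                       // input dimension 1, output dimension 1
    gp.compute(samples, observations);   // fit the GP (kernel matrix recomputed by default)
    gp.optimize_hyperparams();           // tune kernel and mean hyperparameters (KernelMeanLFOpt)
    Eigen::VectorXd mu;
    double sigma_sq;
    std::tie(mu, sigma_sq) = gp.query(limbo::tools::make_vector(0.5)); // posterior mean and variance
    return 0;
}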
@@ -79,10 +79,11 @@ namespace TTSP {
});
samples.push_back(sample);
observations.push_back(limbo::tools::make_vector(result.GetMetricsAfter()));
observations.push_back(limbo::tools::make_vector(-1000.0 * result.GetMetricsAfter()));
});
gp_ard.compute(samples, observations, false);
gp_ard.compute(samples, observations);
gp_ard.optimize_hyperparams();
using acquiopt_t = limbo::opt::NLOptNoGrad<Params, nlopt::GN_DIRECT_L_RAND>;
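A short worked note on the sign flip above: limbo's GP and acquisition machinery searches for a maximum, so a metric that should be minimized (for example, solution time) is stored negated; the factor 1000 is assumed to be only a rescaling of the observations.
// Assumption: GetMetricsAfter() returns a quantity to minimize; limbo maximizes,
// hence the sign flip. The factor 1000 only rescales the observation.
double metrics_after = 0.0123;                   // hypothetical measured metric
double observation   = -1000.0 * metrics_after;  // -12.3: larger is better for the GP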
@@ -104,13 +105,15 @@
double min_bound = parameter.GetMinimalValue();
double max_bound = parameter.GetMaximumValue();
starting_point(i) = (entry.second - min_bound) / (max_bound - min_bound);
starting_point(i) = (entry.first.GetDefaultValue() - min_bound) / (max_bound - min_bound);
i += 1;
});
Eigen::VectorXd new_sample = acquiopt(acqui_optimization, starting_point, true);
std::cout << "Sample: " << new_sample << std::endl;
for (int k = 0; k < parameters.Size(); ++k) {
auto parameter = parameters.GetParameter(static_cast<size_t>(k));
......
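Finally, a self-contained sketch of the acquisition-optimization step shown in the last two hunks, assuming limbo was built with NLOpt support: the optimizer (GN_DIRECT_L_RAND, as above) maximizes over the normalized unit interval and the result is mapped back to the real parameter range with the same (value - min) / (max - min) convention. A simple quadratic stands in for the GP-backed UCB/EI acquisition of the real code.
#include <iostream>
#include <Eigen/Core>
#include <limbo/opt/nlopt_no_grad.hpp>
#include <limbo/opt/optimizer.hpp>
#include <limbo/tools.hpp>
struct Params {
    struct opt_nloptnograd : public limbo::defaults::opt_nloptnograd {};
};
int main() {
    const double min_bound = 1e-3, max_bound = 1e-1;  // example parameter range
    const double default_value = 1e-2;                // example default value
    // Stand-in acquisition function: peaked at x = 0.25 on the normalized axis.
    auto acqui_optimization = [](const Eigen::VectorXd &x, bool /*grad*/) {
        return limbo::opt::no_grad(-(x(0) - 0.25) * (x(0) - 0.25));
    };
    // Start from the normalized default value, as in the hunk above.
    Eigen::VectorXd starting_point =
        limbo::tools::make_vector((default_value - min_bound) / (max_bound - min_bound));
    limbo::opt::NLOptNoGrad<Params, nlopt::GN_DIRECT_L_RAND> acquiopt;
    Eigen::VectorXd new_sample = acquiopt(acqui_optimization, starting_point, true); // bounded in [0,1]
    // Map the normalized optimum back to the real parameter range.
    double suggested = min_bound + new_sample(0) * (max_bound - min_bound);
    std::cout << "normalized optimum = " << new_sample(0)
              << ", suggested parameter = " << suggested << std::endl;
    return 0;
}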