Commit def8afad authored by davidkep's avatar davidkep

Improve default step-size for ADMM and improve tightening for the MM algorithm

parent fbb3a219
......@@ -186,7 +186,7 @@ class Metrics<1> {
explicit Metrics(const std::string& name) noexcept : name_(name) {}
Metrics(const Metrics& other) noexcept : name_(other.name_), metrics_(other.metrics_),
has_metrics_(other.has_metrics_) {
// We are not sure if there's actual data in the sub-metrics. Thin the empty ones out.
for (auto&& sub_metric : other.sub_metrics_) {
if (sub_metric.CheckMetrics() == HasMetrics::kYes) {
......
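The copy constructor in the hunk above is cut off by the collapsed diff, so only the check on each sub-metric is visible. The idiom appears to be filter-while-copying: keep only the sub-metrics that actually carry data and thin out the empty ones. Below is a self-contained sketch of that idiom with hypothetical types; it is not the actual Metrics implementation.

#include <string>
#include <vector>

// Hypothetical stand-in for a sub-metric that may or may not carry data.
struct SubMetric {
  std::string name;
  bool has_data = false;
};

// Copy only the sub-metrics that actually carry data, dropping empty ones,
// mirroring the "thin the empty ones out" comment in the diff above.
std::vector<SubMetric> ThinEmptySubMetrics(const std::vector<SubMetric>& source) {
  std::vector<SubMetric> kept;
  kept.reserve(source.size());
  for (const auto& sub_metric : source) {
    if (sub_metric.has_data) {
      kept.push_back(sub_metric);
    }
  }
  return kept;
}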
This diff is collapsed.
This diff is collapsed.
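The two collapsed diffs presumably contain the ADMM step-size and MM tightening changes announced in the commit message; their contents are not shown here. For orientation only, a common heuristic for a default ADMM step size on a least-squares loss ties it to the squared spectral norm of the design matrix, which bounds the Lipschitz constant of the gradient. The following is a minimal sketch assuming Armadillo and a hypothetical helper name, not the code in this commit.

#include <armadillo>

// Hypothetical helper: derive a default ADMM step size from the data.
// A frequent heuristic uses the largest singular value of X, whose square
// bounds the Lipschitz constant of the least-squares gradient.
inline double DefaultAdmmStepSize(const arma::mat& x) {
  const double spectral_norm = arma::norm(x, 2);  // largest singular value
  if (spectral_norm <= 0) {
    return 1.0;  // Degenerate data: fall back to a neutral step size.
  }
  return 1.0 / (spectral_norm * spectral_norm);
}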
......@@ -21,7 +21,7 @@ namespace nsoptim {
enum class OptimumStatus { kOk, kWarning, kError };
namespace optimum_internal {
using ConstMetricsPtr = std::unique_ptr<const Metrics>;
using MetricsPtr = std::unique_ptr<Metrics>;
//! Wrapper around the information at an optimum point.
template <typename T, typename U, typename V>
......@@ -32,17 +32,18 @@ struct Optimum {
Optimum(const LossFunction& _loss, const PenaltyFunction& _penalty) noexcept : loss(_loss), penalty(_penalty) {}
Optimum(const LossFunction& _loss, const PenaltyFunction& _penalty, const Coefficients& _coefs,
const double _objf_value, ConstMetricsPtr _metrics,
const double _objf_value, MetricsPtr _metrics,
const OptimumStatus _status, const std::string& _message) noexcept
: loss(_loss), penalty(_penalty), coefs(_coefs), objf_value(_objf_value), metrics(std::move(_metrics)),
status(_status), message(_message) {}
Optimum(const Optimum& other) noexcept : loss(other.loss), penalty(other.penalty), coefs(other.coefs),
objf_value(other.objf_value), metrics(nullptr), status(other.status),
message(other.message) {}
objf_value(other.objf_value),
metrics(other.metrics ? new Metrics(*other.metrics) : nullptr),
status(other.status), message(other.message) {}
Optimum(Optimum&& other) = default;
//! Move-assignable operatur must be explicity defined.
//! Move-assignment operator must be explicitly defined.
Optimum& operator=(Optimum&& other) {
loss = std::move(other.loss);
penalty = std::move(other.penalty);
......@@ -63,7 +64,7 @@ struct Optimum {
//! The value of the objective function at this optimum.
double objf_value = std::numeric_limits<double>::max();
//! Optional metrics associated with this optimum.
ConstMetricsPtr metrics;
MetricsPtr metrics;
//! The status of the optimizer at the time this optimum was found.
OptimumStatus status = OptimumStatus::kError;
//! An optional status message of the optimizer at the time this optimum was found.
......@@ -82,7 +83,7 @@ using Optimum = optimum_internal::Optimum<typename std::decay<LossFunction>::typ
template <typename LossFunction, typename PenaltyFunction, typename Coefficients>
Optimum<LossFunction, PenaltyFunction, Coefficients> MakeOptimum(
const LossFunction& loss, const PenaltyFunction& penalty, const Coefficients& coefs,
const double objf_value, optimum_internal::ConstMetricsPtr metrics,
const double objf_value, optimum_internal::MetricsPtr metrics,
const OptimumStatus status = OptimumStatus::kOk, const std::string& message = {}) noexcept {
return Optimum<LossFunction, PenaltyFunction, Coefficients>(loss, penalty, coefs, objf_value, std::move(metrics),
status, message);
......@@ -91,7 +92,7 @@ Optimum<LossFunction, PenaltyFunction, Coefficients> MakeOptimum(
template <typename LossFunction, typename PenaltyFunction, typename Coefficients>
Optimum<LossFunction, PenaltyFunction, Coefficients> MakeOptimum(
const LossFunction& loss, const PenaltyFunction& penalty, const Coefficients& coefs,
optimum_internal::ConstMetricsPtr metrics,
optimum_internal::MetricsPtr metrics,
const OptimumStatus status = OptimumStatus::kOk, const std::string& message = {}) noexcept {
return Optimum<LossFunction, PenaltyFunction, Coefficients>(loss, penalty, coefs, loss(coefs) + penalty(coefs),
std::move(metrics), status, message);
......
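The substantive change in this file is that Optimum now stores a mutable MetricsPtr instead of a ConstMetricsPtr, and its copy constructor deep-copies the metrics (previously it set metrics(nullptr), silently dropping them). The standalone sketch below mirrors that clone-on-copy idiom with a hypothetical Payload type in place of Metrics; it is an illustration of the pattern, not part of the library.

#include <memory>
#include <string>

// Hypothetical stand-in for the Metrics object attached to an optimum.
struct Payload {
  std::string data;
};

struct Result {
  std::unique_ptr<Payload> payload;

  Result() = default;
  explicit Result(std::unique_ptr<Payload> p) noexcept : payload(std::move(p)) {}

  // Deep-copy the payload if present, analogous to the new Optimum copy
  // constructor; the old behaviour was payload(nullptr), losing the data.
  Result(const Result& other)
      : payload(other.payload ? new Payload(*other.payload) : nullptr) {}

  Result(Result&&) = default;
  Result& operator=(Result&&) = default;
};

int main() {
  Result original(std::make_unique<Payload>(Payload{"converged in 12 iterations"}));
  Result copy(original);  // copy.payload is a distinct deep copy, not null.
  return copy.payload && copy.payload->data == original.payload->data ? 0 : 1;
}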