WPILibC++ 2027.0.0-alpha-5
Loading...
Searching...
No Matches
problem.hpp
Go to the documentation of this file.
1// Copyright (c) Sleipnir contributors
2
3#pragma once
4
5#include <algorithm>
6#include <array>
7#include <cmath>
8#include <concepts>
9#include <functional>
10#include <iterator>
11#include <memory>
12#include <optional>
13#include <ranges>
14#include <utility>
15
16#include <Eigen/Core>
17#include <Eigen/SparseCore>
18#include <fmt/chrono.h>
19#include <gch/small_vector.hpp>
20
28#include "sleipnir/optimization/solver/interior_point.hpp"
30#include "sleipnir/optimization/solver/newton.hpp"
32#include "sleipnir/optimization/solver/sqp.hpp"
38#include "sleipnir/util/spy.hpp"
41
42namespace slp {
43
44/// This class allows the user to pose a constrained nonlinear optimization
45/// problem in natural mathematical notation and solve it.
46///
47/// This class supports problems of the form:
48///
49/// ```
50/// minₓ f(x)
51/// subject to cₑ(x) = 0
52/// cᵢ(x) ≥ 0
53/// ```
54///
55/// where f(x) is the scalar cost function, x is the vector of decision
56/// variables (variables the solver can tweak to minimize the cost function),
57/// cᵢ(x) are the inequality constraints, and cₑ(x) are the equality
58/// constraints. Constraints are equations or inequalities of the decision
59/// variables that constrain what values the solver is allowed to use when
60/// searching for an optimal solution.
61///
62/// The nice thing about this class is users don't have to put their system in
63/// the form shown above manually; they can write it in natural mathematical
64/// form and it'll be converted for them.
65///
66/// @tparam Scalar Scalar type.
67template <typename Scalar>
68class Problem {
69 public:
  /// Constructs the optimization problem.
  ///
  /// The problem starts empty; add decision variables, a cost function, and
  /// constraints via decision_variable(), minimize()/maximize(), and
  /// subject_to().
  Problem() noexcept = default;
72
73 /// Creates a decision variable in the optimization problem.
74 ///
75 /// Decision variables have an initial value of zero.
76 ///
77 /// @return A decision variable in the optimization problem.
78 [[nodiscard]]
80 m_decision_variables.emplace_back();
81 return m_decision_variables.back();
82 }
83
84 /// Creates a matrix of decision variables in the optimization problem.
85 ///
86 /// Decision variables have an initial value of zero.
87 ///
88 /// @param rows Number of matrix rows.
89 /// @param cols Number of matrix columns.
90 /// @return A matrix of decision variables in the optimization problem.
91 [[nodiscard]]
92 VariableMatrix<Scalar> decision_variable(int rows, int cols = 1) {
93 m_decision_variables.reserve(m_decision_variables.size() + rows * cols);
94
95 VariableMatrix<Scalar> vars{detail::empty, rows, cols};
96
97 for (int row = 0; row < rows; ++row) {
98 for (int col = 0; col < cols; ++col) {
99 m_decision_variables.emplace_back();
100 vars(row, col) = m_decision_variables.back();
101 }
102 }
103
104 return vars;
105 }
106
107 /// Creates a symmetric matrix of decision variables in the optimization
108 /// problem.
109 ///
110 /// Variable instances are reused across the diagonal, which helps reduce
111 /// problem dimensionality.
112 ///
113 /// Decision variables have an initial value of zero.
114 ///
115 /// @param rows Number of matrix rows.
116 /// @return A symmetric matrix of decision varaibles in the optimization
117 /// problem.
118 [[nodiscard]]
120 // We only need to store the lower triangle of an n x n symmetric matrix;
121 // the other elements are duplicates. The lower triangle has (n² + n)/2
122 // elements.
123 //
124 // n
125 // Σ k = (n² + n)/2
126 // k=1
127 m_decision_variables.reserve(m_decision_variables.size() +
128 (rows * rows + rows) / 2);
129
130 VariableMatrix<Scalar> vars{detail::empty, rows, rows};
131
132 for (int row = 0; row < rows; ++row) {
133 for (int col = 0; col <= row; ++col) {
134 m_decision_variables.emplace_back();
135 vars(row, col) = m_decision_variables.back();
136 vars(col, row) = m_decision_variables.back();
137 }
138 }
139
140 return vars;
141 }
142
143 /// Tells the solver to minimize the output of the given cost function.
144 ///
145 /// Note that this is optional. If only constraints are specified, the solver
146 /// will find the closest solution to the initial conditions that's in the
147 /// feasible set.
148 ///
149 /// @param cost The cost function to minimize. A 1x1 VariableMatrix will
150 /// implicitly convert to a Variable, and a non-1x1 VariableMatrix will
151 /// raise an assertion.
152 void minimize(const Variable<Scalar>& cost) { m_f = cost; }
153
154 /// Tells the solver to minimize the output of the given cost function.
155 ///
156 /// Note that this is optional. If only constraints are specified, the solver
157 /// will find the closest solution to the initial conditions that's in the
158 /// feasible set.
159 ///
160 /// @param cost The cost function to minimize. A 1x1 VariableMatrix will
161 /// implicitly convert to a Variable, and a non-1x1 VariableMatrix will
162 /// raise an assertion.
163 void minimize(Variable<Scalar>&& cost) { m_f = std::move(cost); }
164
165 /// Tells the solver to maximize the output of the given objective function.
166 ///
167 /// Note that this is optional. If only constraints are specified, the solver
168 /// will find the closest solution to the initial conditions that's in the
169 /// feasible set.
170 ///
171 /// @param objective The objective function to maximize. A 1x1 VariableMatrix
172 /// will implicitly convert to a Variable, and a non-1x1 VariableMatrix
173 /// will raise an assertion.
174 void maximize(const Variable<Scalar>& objective) {
175 // Maximizing a cost function is the same as minimizing its negative
176 m_f = -objective;
177 }
178
179 /// Tells the solver to maximize the output of the given objective function.
180 ///
181 /// Note that this is optional. If only constraints are specified, the solver
182 /// will find the closest solution to the initial conditions that's in the
183 /// feasible set.
184 ///
185 /// @param objective The objective function to maximize. A 1x1 VariableMatrix
186 /// will implicitly convert to a Variable, and a non-1x1 VariableMatrix
187 /// will raise an assertion.
188 void maximize(Variable<Scalar>&& objective) {
189 // Maximizing a cost function is the same as minimizing its negative
190 m_f = -std::move(objective);
191 }
192
193 /// Tells the solver to solve the problem while satisfying the given equality
194 /// constraint.
195 ///
196 /// @param constraint The constraint to satisfy.
197 void subject_to(const EqualityConstraints<Scalar>& constraint) {
198 m_equality_constraints.reserve(m_equality_constraints.size() +
199 constraint.constraints.size());
200 std::ranges::copy(constraint.constraints,
201 std::back_inserter(m_equality_constraints));
202 }
203
204 /// Tells the solver to solve the problem while satisfying the given equality
205 /// constraint.
206 ///
207 /// @param constraint The constraint to satisfy.
209 m_equality_constraints.reserve(m_equality_constraints.size() +
210 constraint.constraints.size());
211 std::ranges::copy(constraint.constraints,
212 std::back_inserter(m_equality_constraints));
213 }
214
215 /// Tells the solver to solve the problem while satisfying the given
216 /// inequality constraint.
217 ///
218 /// @param constraint The constraint to satisfy.
220 m_inequality_constraints.reserve(m_inequality_constraints.size() +
221 constraint.constraints.size());
222 std::ranges::copy(constraint.constraints,
223 std::back_inserter(m_inequality_constraints));
224 }
225
226 /// Tells the solver to solve the problem while satisfying the given
227 /// inequality constraint.
228 ///
229 /// @param constraint The constraint to satisfy.
231 m_inequality_constraints.reserve(m_inequality_constraints.size() +
232 constraint.constraints.size());
233 std::ranges::copy(constraint.constraints,
234 std::back_inserter(m_inequality_constraints));
235 }
236
237 /// Returns the cost function's type.
238 ///
239 /// @return The cost function's type.
241 if (m_f) {
242 return m_f.value().type();
243 } else {
245 }
246 }
247
248 /// Returns the type of the highest order equality constraint.
249 ///
250 /// @return The type of the highest order equality constraint.
252 if (!m_equality_constraints.empty()) {
253 return std::ranges::max(m_equality_constraints, {},
255 .type();
256 } else {
258 }
259 }
260
261 /// Returns the type of the highest order inequality constraint.
262 ///
263 /// @return The type of the highest order inequality constraint.
265 if (!m_inequality_constraints.empty()) {
266 return std::ranges::max(m_inequality_constraints, {},
268 .type();
269 } else {
271 }
272 }
273
274 /// Solves the optimization problem. The solution will be stored in the
275 /// original variables used to construct the problem.
276 ///
277 /// @param options Solver options.
278 /// @param spy Enables writing sparsity patterns of H, Aₑ, and Aᵢ to files
279 /// named H.spy, A_e.spy, and A_i.spy respectively during solve. Use
280 /// tools/spy.py to plot them.
281 /// @return The solver status.
282 ExitStatus solve(const Options& options = Options{},
283 [[maybe_unused]] bool spy = false) {
284 using DenseVector = Eigen::Vector<Scalar, Eigen::Dynamic>;
285 using SparseMatrix = Eigen::SparseMatrix<Scalar>;
286 using SparseVector = Eigen::SparseVector<Scalar>;
287
288 // Create the initial value column vector
289 DenseVector x{m_decision_variables.size()};
290 for (size_t i = 0; i < m_decision_variables.size(); ++i) {
291 x[i] = m_decision_variables[i].value();
292 }
293
294 if (options.diagnostics) {
295 print_exit_conditions(options);
296 print_problem_analysis();
297 }
298
299 // Get the highest order constraint expression types
300 auto f_type = cost_function_type();
301 auto c_e_type = equality_constraint_type();
302 auto c_i_type = inequality_constraint_type();
303
304 // If the problem is empty or constant, there's nothing to do
305 if (f_type <= ExpressionType::CONSTANT &&
306 c_e_type <= ExpressionType::CONSTANT &&
307 c_i_type <= ExpressionType::CONSTANT) {
308#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
309 if (options.diagnostics) {
310 slp::println("\nInvoking no-op solver\n");
311 }
312#endif
313 return ExitStatus::SUCCESS;
314 }
315
316 VariableMatrix<Scalar> x_ad{m_decision_variables};
317
318 // Set up cost function
319 Variable f = m_f.value_or(Scalar(0));
320
321 int num_decision_variables = m_decision_variables.size();
322 int num_equality_constraints = m_equality_constraints.size();
323 int num_inequality_constraints = m_inequality_constraints.size();
324
325 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
326 iteration_callbacks;
327 for (const auto& callback : m_iteration_callbacks) {
328 iteration_callbacks.emplace_back(callback);
329 }
330 for (const auto& callback : m_persistent_iteration_callbacks) {
331 iteration_callbacks.emplace_back(callback);
332 }
333
334 // Solve the optimization problem
335 ExitStatus status;
336 if (m_equality_constraints.empty() && m_inequality_constraints.empty()) {
337 if (options.diagnostics) {
338 slp::println("\nInvoking Newton solver\n");
339 }
340
341 gch::small_vector<SetupProfiler> ad_setup_profilers;
342 ad_setup_profilers.emplace_back("setup");
343 ad_setup_profilers.emplace_back("↳ ∇f(x)");
344 ad_setup_profilers.emplace_back("↳ ∇²ₓₓL");
345
346 ad_setup_profilers[0].start();
347
348 // Set up gradient autodiff
349 ad_setup_profilers[1].start();
350 Gradient g{f, x_ad};
351 ad_setup_profilers[1].stop();
352
353 // Set up Lagrangian Hessian autodiff
354 ad_setup_profilers[2].start();
355 Hessian<Scalar, Eigen::Lower> H{f, x_ad};
356 ad_setup_profilers[2].stop();
357
358 ad_setup_profilers[0].stop();
359
360 if (options.diagnostics) {
361 print_setup_diagnostics(ad_setup_profilers);
362 }
363
364#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
365 // Sparsity pattern files written when spy flag is set
366 std::unique_ptr<Spy<Scalar>> H_spy;
367
368 if (spy) {
369 H_spy = std::make_unique<Spy<Scalar>>(
370 "H.spy", "Hessian", "Decision variables", "Decision variables",
371 num_decision_variables, num_decision_variables);
372 iteration_callbacks.push_back(
373 [&](const IterationInfo<Scalar>& info) -> bool {
374 H_spy->add(info.H);
375 return false;
376 });
377 }
378#endif
379
380 NewtonMatrixCallbacks<Scalar> matrix_callbacks{
381 num_decision_variables,
382 [&](const DenseVector& x) -> Scalar {
383 x_ad.set_value(x);
384 return f.value();
385 },
386 [&](const DenseVector& x) -> SparseVector {
387 x_ad.set_value(x);
388 return g.value();
389 },
390 [&](const DenseVector& x) -> SparseMatrix {
391 x_ad.set_value(x);
392 return H.value();
393 }};
394
395 // Invoke Newton solver
396 status =
397 newton<Scalar>(matrix_callbacks, iteration_callbacks, options, x);
398 } else if (m_inequality_constraints.empty()) {
399 if (options.diagnostics) {
400 slp::println("\nInvoking SQP solver\n");
401 }
402
403 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
404 VariableMatrix<Scalar> y_ad(num_equality_constraints);
405
406 gch::small_vector<SetupProfiler> ad_setup_profilers;
407 ad_setup_profilers.emplace_back("setup");
408 ad_setup_profilers.emplace_back("↳ ∇f(x)");
409 ad_setup_profilers.emplace_back("↳ ∇²ₓₓL");
410 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_f");
411 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_c");
412 ad_setup_profilers.emplace_back("↳ ∂cₑ/∂x");
413
414 ad_setup_profilers[0].start();
415
416 // Set up gradient autodiff
417 ad_setup_profilers[1].start();
418 Gradient g{f, x_ad};
419 ad_setup_profilers[1].stop();
420
421 ad_setup_profilers[2].start();
422
423 // Set up cost part of Lagrangian Hessian autodiff
424 ad_setup_profilers[3].start();
425 Hessian<Scalar, Eigen::Lower> H_f{f, x_ad};
426 ad_setup_profilers[3].stop();
427
428 // Set up constraint part of Lagrangian Hessian autodiff
429 ad_setup_profilers[4].start();
430 Hessian<Scalar, Eigen::Lower> H_c{-y_ad.T() * c_e_ad, x_ad};
431 ad_setup_profilers[4].stop();
432
433 ad_setup_profilers[2].stop();
434
435 // Set up equality constraint Jacobian autodiff
436 ad_setup_profilers[5].start();
437 Jacobian A_e{c_e_ad, x_ad};
438 ad_setup_profilers[5].stop();
439
440 ad_setup_profilers[0].stop();
441
442 if (options.diagnostics) {
443 print_setup_diagnostics(ad_setup_profilers);
444 }
445
446#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
447 // Sparsity pattern files written when spy flag is set
448 std::unique_ptr<Spy<Scalar>> H_spy;
449 std::unique_ptr<Spy<Scalar>> A_e_spy;
450
451 if (spy) {
452 H_spy = std::make_unique<Spy<Scalar>>(
453 "H.spy", "Hessian", "Decision variables", "Decision variables",
454 num_decision_variables, num_decision_variables);
455 A_e_spy = std::make_unique<Spy<Scalar>>(
456 "A_e.spy", "Equality constraint Jacobian", "Constraints",
457 "Decision variables", num_equality_constraints,
458 num_decision_variables);
459 iteration_callbacks.push_back(
460 [&](const IterationInfo<Scalar>& info) -> bool {
461 H_spy->add(info.H);
462 A_e_spy->add(info.A_e);
463 return false;
464 });
465 }
466#endif
467
468 SQPMatrixCallbacks<Scalar> matrix_callbacks{
469 num_decision_variables,
470 num_equality_constraints,
471 [&](const DenseVector& x) -> Scalar {
472 x_ad.set_value(x);
473 return f.value();
474 },
475 [&](const DenseVector& x) -> SparseVector {
476 x_ad.set_value(x);
477 return g.value();
478 },
479 [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
480 x_ad.set_value(x);
481 y_ad.set_value(y);
482 return H_f.value() + H_c.value();
483 },
484 [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
485 x_ad.set_value(x);
486 y_ad.set_value(y);
487 return H_c.value();
488 },
489 [&](const DenseVector& x) -> DenseVector {
490 x_ad.set_value(x);
491 return c_e_ad.value();
492 },
493 [&](const DenseVector& x) -> SparseMatrix {
494 x_ad.set_value(x);
495 return A_e.value();
496 }};
497
498 // Invoke SQP solver
499 status = sqp<Scalar>(matrix_callbacks, iteration_callbacks, options, x);
500 } else {
501 if (options.diagnostics) {
502 slp::println("\nInvoking IPM solver\n");
503 }
504
505 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
506 VariableMatrix<Scalar> c_i_ad{m_inequality_constraints};
507 VariableMatrix<Scalar> y_ad(num_equality_constraints);
508 VariableMatrix<Scalar> z_ad(num_inequality_constraints);
509
510 gch::small_vector<SetupProfiler> ad_setup_profilers;
511 ad_setup_profilers.emplace_back("setup");
512 ad_setup_profilers.emplace_back("↳ ∇f(x)");
513 ad_setup_profilers.emplace_back("↳ ∇²ₓₓL");
514 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_f");
515 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_c");
516 ad_setup_profilers.emplace_back("↳ ∂cₑ/∂x");
517 ad_setup_profilers.emplace_back("↳ ∂cᵢ/∂x");
518
519 ad_setup_profilers[0].start();
520
521 // Set up gradient autodiff
522 ad_setup_profilers[1].start();
523 Gradient g{f, x_ad};
524 ad_setup_profilers[1].stop();
525
526 ad_setup_profilers[2].start();
527
528 // Set up cost part of Lagrangian Hessian autodiff
529 ad_setup_profilers[3].start();
530 Hessian<Scalar, Eigen::Lower> H_f{f, x_ad};
531 ad_setup_profilers[3].stop();
532
533 // Set up constraint part of Lagrangian Hessian autodiff
534 ad_setup_profilers[4].start();
535 Hessian<Scalar, Eigen::Lower> H_c{-y_ad.T() * c_e_ad - z_ad.T() * c_i_ad,
536 x_ad};
537 ad_setup_profilers[4].stop();
538
539 ad_setup_profilers[2].stop();
540
541 // Set up equality constraint Jacobian autodiff
542 ad_setup_profilers[5].start();
543 Jacobian A_e{c_e_ad, x_ad};
544 ad_setup_profilers[5].stop();
545
546 // Set up inequality constraint Jacobian autodiff
547 ad_setup_profilers[6].start();
548 Jacobian A_i{c_i_ad, x_ad};
549 ad_setup_profilers[6].stop();
550
551 ad_setup_profilers[0].stop();
552
553 if (options.diagnostics) {
554 print_setup_diagnostics(ad_setup_profilers);
555 }
556
557#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
558 // Sparsity pattern files written when spy flag is set
559 std::unique_ptr<Spy<Scalar>> H_spy;
560 std::unique_ptr<Spy<Scalar>> A_e_spy;
561 std::unique_ptr<Spy<Scalar>> A_i_spy;
562
563 if (spy) {
564 H_spy = std::make_unique<Spy<Scalar>>(
565 "H.spy", "Hessian", "Decision variables", "Decision variables",
566 num_decision_variables, num_decision_variables);
567 A_e_spy = std::make_unique<Spy<Scalar>>(
568 "A_e.spy", "Equality constraint Jacobian", "Constraints",
569 "Decision variables", num_equality_constraints,
570 num_decision_variables);
571 A_i_spy = std::make_unique<Spy<Scalar>>(
572 "A_i.spy", "Inequality constraint Jacobian", "Constraints",
573 "Decision variables", num_inequality_constraints,
574 num_decision_variables);
575 iteration_callbacks.push_back(
576 [&](const IterationInfo<Scalar>& info) -> bool {
577 H_spy->add(info.H);
578 A_e_spy->add(info.A_e);
579 A_i_spy->add(info.A_i);
580 return false;
581 });
582 }
583#endif
584
585 const auto [bound_constraint_mask, bounds, conflicting_bound_indices] =
586 get_bounds<Scalar>(m_decision_variables, m_inequality_constraints,
587 A_i.value());
588 if (!conflicting_bound_indices.empty()) {
589 if (options.diagnostics) {
591 conflicting_bound_indices);
592 }
594 }
595
596#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
598#endif
599
600 InteriorPointMatrixCallbacks<Scalar> matrix_callbacks{
601 num_decision_variables,
602 num_equality_constraints,
603 num_inequality_constraints,
604 [&](const DenseVector& x) -> Scalar {
605 x_ad.set_value(x);
606 return f.value();
607 },
608 [&](const DenseVector& x) -> SparseVector {
609 x_ad.set_value(x);
610 return g.value();
611 },
612 [&](const DenseVector& x, const DenseVector& y,
613 const DenseVector& z) -> SparseMatrix {
614 x_ad.set_value(x);
615 y_ad.set_value(y);
616 z_ad.set_value(z);
617 return H_f.value() + H_c.value();
618 },
619 [&](const DenseVector& x, const DenseVector& y,
620 const DenseVector& z) -> SparseMatrix {
621 x_ad.set_value(x);
622 y_ad.set_value(y);
623 z_ad.set_value(z);
624 return H_c.value();
625 },
626 [&](const DenseVector& x) -> DenseVector {
627 x_ad.set_value(x);
628 return c_e_ad.value();
629 },
630 [&](const DenseVector& x) -> SparseMatrix {
631 x_ad.set_value(x);
632 return A_e.value();
633 },
634 [&](const DenseVector& x) -> DenseVector {
635 x_ad.set_value(x);
636 return c_i_ad.value();
637 },
638 [&](const DenseVector& x) -> SparseMatrix {
639 x_ad.set_value(x);
640 return A_i.value();
641 }};
642
643 // Invoke interior-point method solver
644 status =
645 interior_point<Scalar>(matrix_callbacks, iteration_callbacks, options,
646#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
647 bound_constraint_mask,
648#endif
649 x);
650 }
651
652 if (options.diagnostics) {
653 slp::println("\nExit: {}", status);
654 }
655
656 // Assign the solution to the original Variable instances
657 VariableMatrix<Scalar>{m_decision_variables}.set_value(x);
658
659 return status;
660 }
661
662 /// Adds a callback to be called at the beginning of each solver iteration.
663 ///
664 /// The callback for this overload should return void.
665 ///
666 /// @param callback The callback.
667 template <typename F>
668 requires requires(F callback, const IterationInfo<Scalar>& info) {
669 { callback(info) } -> std::same_as<void>;
670 }
671 void add_callback(F&& callback) {
672 m_iteration_callbacks.emplace_back(
673 [=, callback =
674 std::forward<F>(callback)](const IterationInfo<Scalar>& info) {
675 callback(info);
676 return false;
677 });
678 }
679
  /// Adds a callback to be called at the beginning of each solver iteration.
  ///
  /// The callback for this overload should return bool.
  ///
  /// @param callback The callback. Returning true from the callback causes the
  ///     solver to exit early with the solution it has so far.
  template <typename F>
    requires requires(F callback, const IterationInfo<Scalar>& info) {
      { callback(info) } -> std::same_as<bool>;
    }
  void add_callback(F&& callback) {
    // Stored as-is; the solver treats a true return value as a stop request
    m_iteration_callbacks.emplace_back(std::forward<F>(callback));
  }
693
  /// Clears the registered callbacks.
  ///
  /// Only callbacks registered via add_callback() are removed; persistent
  /// callbacks registered via add_persistent_callback() are kept.
  void clear_callbacks() { m_iteration_callbacks.clear(); }
696
  /// Adds a callback to be called at the beginning of each solver iteration.
  ///
  /// Language bindings should call this in the Problem constructor to register
  /// callbacks that shouldn't be removed by clear_callbacks(). Persistent
  /// callbacks run after non-persistent callbacks.
  ///
  /// @param callback The callback. Returning true from the callback causes the
  ///     solver to exit early with the solution it has so far.
  template <typename F>
    requires requires(F callback, const IterationInfo<Scalar>& info) {
      { callback(info) } -> std::same_as<bool>;
    }
  void add_persistent_callback(F&& callback) {
    // Kept in a separate list so clear_callbacks() can't remove it
    m_persistent_iteration_callbacks.emplace_back(std::forward<F>(callback));
  }
712
713 private:
714 // The list of decision variables, which are the root of the problem's
715 // expression tree
716 gch::small_vector<Variable<Scalar>> m_decision_variables;
717
718 // The cost function: f(x)
719 std::optional<Variable<Scalar>> m_f;
720
721 // The list of equality constraints: cₑ(x) = 0
722 gch::small_vector<Variable<Scalar>> m_equality_constraints;
723
724 // The list of inequality constraints: cᵢ(x) ≥ 0
725 gch::small_vector<Variable<Scalar>> m_inequality_constraints;
726
727 // The iteration callbacks
728 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
729 m_iteration_callbacks;
730 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
731 m_persistent_iteration_callbacks;
732
  /// Prints the solver exit conditions implied by the given options.
  ///
  /// @param options Solver options whose tolerance, iteration limit, and
  ///     timeout are reported.
  void print_exit_conditions([[maybe_unused]] const Options& options) {
    // Print possible exit conditions
    slp::println("User-configured exit conditions:");
    slp::println("  ↳ error below {}", options.tolerance);
    if (!m_iteration_callbacks.empty() ||
        !m_persistent_iteration_callbacks.empty()) {
      slp::println("  ↳ iteration callback requested stop");
    }
    // A limit is only a real exit condition when it's finite
    if (std::isfinite(options.max_iterations)) {
      slp::println("  ↳ executed {} iterations", options.max_iterations);
    }
    if (std::isfinite(options.timeout.count())) {
      slp::println("  ↳ {} elapsed", options.timeout);
    }
  }
748
749 void print_problem_analysis() {
750 constexpr std::array types{"no", "constant", "linear", "quadratic",
751 "nonlinear"};
752
753 // Print problem structure
754 slp::println("\nProblem structure:");
755 slp::println(" ↳ {} cost function",
757 slp::println(" ↳ {} equality constraints",
759 slp::println(" ↳ {} inequality constraints",
761
762 if (m_decision_variables.size() == 1) {
763 slp::print("\n1 decision variable\n");
764 } else {
765 slp::print("\n{} decision variables\n", m_decision_variables.size());
766 }
767
768 auto print_constraint_types =
769 [](const gch::small_vector<Variable<Scalar>>& constraints) {
770 std::array<size_t, 5> counts{};
771 for (const auto& constraint : constraints) {
772 ++counts[slp::to_underlying(constraint.type())];
773 }
774 for (size_t i = 0; i < counts.size(); ++i) {
775 constexpr std::array names{"empty", "constant", "linear",
776 "quadratic", "nonlinear"};
777 const auto& count = counts[i];
778 const auto& name = names[i];
779 if (count > 0) {
780 slp::println(" ↳ {} {}", count, name);
781 }
782 }
783 };
784
785 // Print constraint types
786 if (m_equality_constraints.size() == 1) {
787 slp::println("1 equality constraint");
788 } else {
789 slp::println("{} equality constraints", m_equality_constraints.size());
790 }
791 print_constraint_types(m_equality_constraints);
792 if (m_inequality_constraints.size() == 1) {
793 slp::println("1 inequality constraint");
794 } else {
795 slp::println("{} inequality constraints",
796 m_inequality_constraints.size());
797 }
798 print_constraint_types(m_inequality_constraints);
799 }
800};
801
802extern template class EXPORT_TEMPLATE_DECLARE(SLEIPNIR_DLLEXPORT)
804
805} // namespace slp
#define EXPORT_TEMPLATE_DECLARE(export)
Definition SymbolExports.hpp:94
within a display generated by the Derivative if and wherever such third party notices normally appear The contents of the NOTICE file are for informational purposes only and do not modify the License You may add Your own attribution notices within Derivative Works that You alongside or as an addendum to the NOTICE text from the provided that such additional attribution notices cannot be construed as modifying the License You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for or distribution of Your or for any such Derivative Works as a provided Your and distribution of the Work otherwise complies with the conditions stated in this License Submission of Contributions Unless You explicitly state any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this without any additional terms or conditions Notwithstanding the nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions Trademarks This License does not grant permission to use the trade names
Definition ThirdPartyNotices.txt:230
@ name
Definition base.h:690
This class allows the user to pose a constrained nonlinear optimization problem in natural mathematic...
Definition problem.hpp:68
VariableMatrix< Scalar > symmetric_decision_variable(int rows)
Creates a symmetric matrix of decision variables in the optimization problem.
Definition problem.hpp:119
void subject_to(InequalityConstraints< Scalar > &&constraint)
Tells the solver to solve the problem while satisfying the given inequality constraint.
Definition problem.hpp:230
void add_callback(F &&callback)
Adds a callback to be called at the beginning of each solver iteration.
Definition problem.hpp:690
void subject_to(EqualityConstraints< Scalar > &&constraint)
Tells the solver to solve the problem while satisfying the given equality constraint.
Definition problem.hpp:208
ExpressionType equality_constraint_type() const
Returns the type of the highest order equality constraint.
Definition problem.hpp:251
void maximize(Variable< Scalar > &&objective)
Tells the solver to maximize the output of the given objective function.
Definition problem.hpp:188
void add_persistent_callback(F &&callback)
Adds a callback to be called at the beginning of each solver iteration.
Definition problem.hpp:709
void subject_to(const InequalityConstraints< Scalar > &constraint)
Tells the solver to solve the problem while satisfying the given inequality constraint.
Definition problem.hpp:219
ExpressionType inequality_constraint_type() const
Returns the type of the highest order inequality constraint.
Definition problem.hpp:264
void minimize(const Variable< Scalar > &cost)
Tells the solver to minimize the output of the given cost function.
Definition problem.hpp:152
Problem() noexcept=default
Constructs the optimization problem.
VariableMatrix< Scalar > decision_variable(int rows, int cols=1)
Creates a matrix of decision variables in the optimization problem.
Definition problem.hpp:92
void minimize(Variable< Scalar > &&cost)
Tells the solver to minimize the output of the given cost function.
Definition problem.hpp:163
void clear_callbacks()
Clears the registered callbacks.
Definition problem.hpp:695
void subject_to(const EqualityConstraints< Scalar > &constraint)
Tells the solver to solve the problem while satisfying the given equality constraint.
Definition problem.hpp:197
void maximize(const Variable< Scalar > &objective)
Tells the solver to maximize the output of the given objective function.
Definition problem.hpp:174
void add_callback(F &&callback)
Adds a callback to be called at the beginning of each solver iteration.
Definition problem.hpp:671
ExpressionType cost_function_type() const
Returns the cost function's type.
Definition problem.hpp:240
ExitStatus solve(const Options &options=Options{}, bool spy=false)
Solves the optimization problem.
Definition problem.hpp:282
Variable< Scalar > decision_variable()
Creates a decision variable in the optimization problem.
Definition problem.hpp:79
An autodiff variable pointing to an expression node.
Definition variable.hpp:47
ExpressionType type() const
Returns the type of this expression (constant, linear, quadratic, or nonlinear).
Definition variable.hpp:149
A matrix of autodiff variables.
Definition variable_matrix.hpp:33
constexpr auto count() -> int
Definition base.h:1081
wpi::util::SmallVector< T > small_vector
Definition small_vector.hpp:10
static constexpr empty_t empty
Designates an uninitialized VariableMatrix.
Definition empty.hpp:11
Definition to_underlying.hpp:7
constexpr std::underlying_type_t< Enum > to_underlying(Enum e) noexcept
Definition to_underlying.hpp:10
void project_onto_bounds(Eigen::DenseBase< Derived > &x, std::span< const std::pair< typename Eigen::DenseBase< Derived >::Scalar, typename Eigen::DenseBase< Derived >::Scalar > > decision_variable_indices_to_bounds, const typename Eigen::DenseBase< Derived >::Scalar κ_1=typename Eigen::DenseBase< Derived >::Scalar(1e-2), const typename Eigen::DenseBase< Derived >::Scalar κ_2=typename Eigen::DenseBase< Derived >::Scalar(1e-2))
Projects the decision variables onto the given bounds, while ensuring some configurable distance from...
Definition bounds.hpp:195
ExpressionType
Expression type.
Definition expression_type.hpp:16
@ CONSTANT
The expression is a constant.
Definition expression_type.hpp:20
@ NONE
There is no expression.
Definition expression_type.hpp:18
ExitStatus
Solver exit status. Negative values indicate failure.
Definition exit_status.hpp:14
@ GLOBALLY_INFEASIBLE
The problem setup frontend determined the problem to have an empty feasible region.
Definition exit_status.hpp:25
@ SUCCESS
Solved the problem to the desired tolerance.
Definition exit_status.hpp:16
auto bounds(L &&l, X &&x, U &&u)
Helper function for creating bound constraints.
Definition variable.hpp:1000
void print_setup_diagnostics(const gch::small_vector< SetupProfiler > &setup_profilers)
Prints setup diagnostics.
Definition print_diagnostics.hpp:325
ExitStatus interior_point(const InteriorPointMatrixCallbacks< Scalar > &matrix_callbacks, std::span< std::function< bool(const IterationInfo< Scalar > &info)> > iteration_callbacks, const Options &options, bool in_feasibility_restoration, Eigen::Vector< Scalar, Eigen::Dynamic > &x, Eigen::Vector< Scalar, Eigen::Dynamic > &s, Eigen::Vector< Scalar, Eigen::Dynamic > &y, Eigen::Vector< Scalar, Eigen::Dynamic > &z, Scalar &μ)
void print_bound_constraint_global_infeasibility_error(const std::span< const std::pair< Eigen::Index, Eigen::Index > > conflicting_lower_upper_bound_indices)
Definition print_diagnostics.hpp:155
Variable(T< Scalar >) -> Variable< Scalar >
void println(fmt::format_string< T... > fmt, T &&... args)
Wrapper around fmt::println() that squelches write failure exceptions.
Definition print.hpp:37
Bounds< Scalar > get_bounds(std::span< Variable< Scalar > > decision_variables, std::span< Variable< Scalar > > inequality_constraints, const Eigen::SparseMatrix< Scalar, Eigen::RowMajor > &A_i)
A "bound constraint" is any linear constraint in one scalar variable.
Definition bounds.hpp:55
void print(fmt::format_string< T... > fmt, T &&... args)
Wrapper around fmt::print() that squelches write failure exceptions.
Definition print.hpp:19
A vector of equality constraints of the form cₑ(x) = 0.
Definition variable.hpp:773
gch::small_vector< Variable< Scalar > > constraints
A vector of scalar equality constraints.
Definition variable.hpp:775
A vector of inequality constraints of the form cᵢ(x) ≥ 0.
Definition variable.hpp:830
gch::small_vector< Variable< Scalar > > constraints
A vector of scalar inequality constraints.
Definition variable.hpp:832
Solver iteration information exposed to an iteration callback.
Definition iteration_info.hpp:14
Solver options.
Definition options.hpp:13
#define SLEIPNIR_DLLEXPORT
Definition symbol_exports.hpp:34