WPILibC++ 2027.0.0-alpha-4
Loading...
Searching...
No Matches
problem.hpp
Go to the documentation of this file.
1// Copyright (c) Sleipnir contributors
2
3#pragma once
4
5#include <algorithm>
6#include <array>
7#include <cmath>
8#include <concepts>
9#include <functional>
10#include <iterator>
11#include <memory>
12#include <optional>
13#include <ranges>
14#include <utility>
15
16#include <Eigen/Core>
17#include <Eigen/SparseCore>
18#include <fmt/chrono.h>
19#include <gch/small_vector.hpp>
20
28#include "sleipnir/optimization/solver/interior_point.hpp"
30#include "sleipnir/optimization/solver/newton.hpp"
32#include "sleipnir/optimization/solver/sqp.hpp"
38#include "sleipnir/util/spy.hpp"
40
41namespace slp {
42
43/// This class allows the user to pose a constrained nonlinear optimization
44/// problem in natural mathematical notation and solve it.
45///
46/// This class supports problems of the form:
47///
48/// ```
49/// minₓ f(x)
50/// subject to cₑ(x) = 0
51/// cᵢ(x) ≥ 0
52/// ```
53///
54/// where f(x) is the scalar cost function, x is the vector of decision
55/// variables (variables the solver can tweak to minimize the cost function),
56/// cᵢ(x) are the inequality constraints, and cₑ(x) are the equality
57/// constraints. Constraints are equations or inequalities of the decision
58/// variables that constrain what values the solver is allowed to use when
59/// searching for an optimal solution.
60///
61/// The nice thing about this class is users don't have to put their system in
62/// the form shown above manually; they can write it in natural mathematical
63/// form and it'll be converted for them.
64///
65/// @tparam Scalar Scalar type.
66template <typename Scalar>
67class Problem {
68 public:
69 /// Construct the optimization problem.
70 Problem() noexcept = default;
71
72 /// Create a decision variable in the optimization problem.
73 ///
74 /// @return A decision variable in the optimization problem.
75 [[nodiscard]]
77 m_decision_variables.emplace_back();
78 return m_decision_variables.back();
79 }
80
81 /// Create a matrix of decision variables in the optimization problem.
82 ///
83 /// @param rows Number of matrix rows.
84 /// @param cols Number of matrix columns.
85 /// @return A matrix of decision variables in the optimization problem.
86 [[nodiscard]]
87 VariableMatrix<Scalar> decision_variable(int rows, int cols = 1) {
88 m_decision_variables.reserve(m_decision_variables.size() + rows * cols);
89
90 VariableMatrix<Scalar> vars{detail::empty, rows, cols};
91
92 for (int row = 0; row < rows; ++row) {
93 for (int col = 0; col < cols; ++col) {
94 m_decision_variables.emplace_back();
95 vars(row, col) = m_decision_variables.back();
96 }
97 }
98
99 return vars;
100 }
101
102 /// Create a symmetric matrix of decision variables in the optimization
103 /// problem.
104 ///
105 /// Variable instances are reused across the diagonal, which helps reduce
106 /// problem dimensionality.
107 ///
108 /// @param rows Number of matrix rows.
109 /// @return A symmetric matrix of decision varaibles in the optimization
110 /// problem.
111 [[nodiscard]]
113 // We only need to store the lower triangle of an n x n symmetric matrix;
114 // the other elements are duplicates. The lower triangle has (n² + n)/2
115 // elements.
116 //
117 // n
118 // Σ k = (n² + n)/2
119 // k=1
120 m_decision_variables.reserve(m_decision_variables.size() +
121 (rows * rows + rows) / 2);
122
123 VariableMatrix<Scalar> vars{detail::empty, rows, rows};
124
125 for (int row = 0; row < rows; ++row) {
126 for (int col = 0; col <= row; ++col) {
127 m_decision_variables.emplace_back();
128 vars(row, col) = m_decision_variables.back();
129 vars(col, row) = m_decision_variables.back();
130 }
131 }
132
133 return vars;
134 }
135
136 /// Tells the solver to minimize the output of the given cost function.
137 ///
138 /// Note that this is optional. If only constraints are specified, the solver
139 /// will find the closest solution to the initial conditions that's in the
140 /// feasible set.
141 ///
142 /// @param cost The cost function to minimize.
143 void minimize(const Variable<Scalar>& cost) { m_f = cost; }
144
145 /// Tells the solver to minimize the output of the given cost function.
146 ///
147 /// Note that this is optional. If only constraints are specified, the solver
148 /// will find the closest solution to the initial conditions that's in the
149 /// feasible set.
150 ///
151 /// @param cost The cost function to minimize.
152 void minimize(Variable<Scalar>&& cost) { m_f = std::move(cost); }
153
154 /// Tells the solver to maximize the output of the given objective function.
155 ///
156 /// Note that this is optional. If only constraints are specified, the solver
157 /// will find the closest solution to the initial conditions that's in the
158 /// feasible set.
159 ///
160 /// @param objective The objective function to maximize.
161 void maximize(const Variable<Scalar>& objective) {
162 // Maximizing a cost function is the same as minimizing its negative
163 m_f = -objective;
164 }
165
166 /// Tells the solver to maximize the output of the given objective function.
167 ///
168 /// Note that this is optional. If only constraints are specified, the solver
169 /// will find the closest solution to the initial conditions that's in the
170 /// feasible set.
171 ///
172 /// @param objective The objective function to maximize.
173 void maximize(Variable<Scalar>&& objective) {
174 // Maximizing a cost function is the same as minimizing its negative
175 m_f = -std::move(objective);
176 }
177
178 /// Tells the solver to solve the problem while satisfying the given equality
179 /// constraint.
180 ///
181 /// @param constraint The constraint to satisfy.
182 void subject_to(const EqualityConstraints<Scalar>& constraint) {
183 m_equality_constraints.reserve(m_equality_constraints.size() +
184 constraint.constraints.size());
185 std::ranges::copy(constraint.constraints,
186 std::back_inserter(m_equality_constraints));
187 }
188
189 /// Tells the solver to solve the problem while satisfying the given equality
190 /// constraint.
191 ///
192 /// @param constraint The constraint to satisfy.
194 m_equality_constraints.reserve(m_equality_constraints.size() +
195 constraint.constraints.size());
196 std::ranges::copy(constraint.constraints,
197 std::back_inserter(m_equality_constraints));
198 }
199
200 /// Tells the solver to solve the problem while satisfying the given
201 /// inequality constraint.
202 ///
203 /// @param constraint The constraint to satisfy.
205 m_inequality_constraints.reserve(m_inequality_constraints.size() +
206 constraint.constraints.size());
207 std::ranges::copy(constraint.constraints,
208 std::back_inserter(m_inequality_constraints));
209 }
210
211 /// Tells the solver to solve the problem while satisfying the given
212 /// inequality constraint.
213 ///
214 /// @param constraint The constraint to satisfy.
216 m_inequality_constraints.reserve(m_inequality_constraints.size() +
217 constraint.constraints.size());
218 std::ranges::copy(constraint.constraints,
219 std::back_inserter(m_inequality_constraints));
220 }
221
222 /// Returns the cost function's type.
223 ///
224 /// @return The cost function's type.
226 if (m_f) {
227 return m_f.value().type();
228 } else {
230 }
231 }
232
233 /// Returns the type of the highest order equality constraint.
234 ///
235 /// @return The type of the highest order equality constraint.
237 if (!m_equality_constraints.empty()) {
238 return std::ranges::max(m_equality_constraints, {},
240 .type();
241 } else {
243 }
244 }
245
246 /// Returns the type of the highest order inequality constraint.
247 ///
248 /// @return The type of the highest order inequality constraint.
250 if (!m_inequality_constraints.empty()) {
251 return std::ranges::max(m_inequality_constraints, {},
253 .type();
254 } else {
256 }
257 }
258
259 /// Solve the optimization problem. The solution will be stored in the
260 /// original variables used to construct the problem.
261 ///
262 /// @param options Solver options.
263 /// @param spy Enables writing sparsity patterns of H, Aₑ, and Aᵢ to files
264 /// named H.spy, A_e.spy, and A_i.spy respectively during solve. Use
265 /// tools/spy.py to plot them.
266 /// @return The solver status.
267 ExitStatus solve(const Options& options = Options{},
268 [[maybe_unused]] bool spy = false) {
269 using DenseVector = Eigen::Vector<Scalar, Eigen::Dynamic>;
270 using SparseMatrix = Eigen::SparseMatrix<Scalar>;
271 using SparseVector = Eigen::SparseVector<Scalar>;
272
273 // Create the initial value column vector
274 DenseVector x{m_decision_variables.size()};
275 for (size_t i = 0; i < m_decision_variables.size(); ++i) {
276 x[i] = m_decision_variables[i].value();
277 }
278
279 if (options.diagnostics) {
280 print_exit_conditions(options);
281 print_problem_analysis();
282 }
283
284 // Get the highest order constraint expression types
285 auto f_type = cost_function_type();
286 auto c_e_type = equality_constraint_type();
287 auto c_i_type = inequality_constraint_type();
288
289 // If the problem is empty or constant, there's nothing to do
290 if (f_type <= ExpressionType::CONSTANT &&
291 c_e_type <= ExpressionType::CONSTANT &&
292 c_i_type <= ExpressionType::CONSTANT) {
293#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
294 if (options.diagnostics) {
295 slp::println("\nInvoking no-op solver...\n");
296 }
297#endif
298 return ExitStatus::SUCCESS;
299 }
300
301 gch::small_vector<SetupProfiler> ad_setup_profilers;
302 ad_setup_profilers.emplace_back("setup").start();
303
304 VariableMatrix<Scalar> x_ad{m_decision_variables};
305
306 // Set up cost function
307 Variable f = m_f.value_or(Scalar(0));
308
309 // Set up gradient autodiff
310 ad_setup_profilers.emplace_back(" ↳ ∇f(x)").start();
311 Gradient g{f, x_ad};
312 ad_setup_profilers.back().stop();
313
314 [[maybe_unused]]
315 int num_decision_variables = m_decision_variables.size();
316 [[maybe_unused]]
317 int num_equality_constraints = m_equality_constraints.size();
318 [[maybe_unused]]
319 int num_inequality_constraints = m_inequality_constraints.size();
320
321 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
322 callbacks;
323 for (const auto& callback : m_iteration_callbacks) {
324 callbacks.emplace_back(callback);
325 }
326 for (const auto& callback : m_persistent_iteration_callbacks) {
327 callbacks.emplace_back(callback);
328 }
329
330 // Solve the optimization problem
331 ExitStatus status;
332 if (m_equality_constraints.empty() && m_inequality_constraints.empty()) {
333 if (options.diagnostics) {
334 slp::println("\nInvoking Newton solver...\n");
335 }
336
337 // Set up Lagrangian Hessian autodiff
338 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL").start();
339 Hessian<Scalar, Eigen::Lower> H{f, x_ad};
340 ad_setup_profilers.back().stop();
341
342 ad_setup_profilers[0].stop();
343
344#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
345 // Sparsity pattern files written when spy flag is set
346 std::unique_ptr<Spy<Scalar>> H_spy;
347
348 if (spy) {
349 H_spy = std::make_unique<Spy<Scalar>>(
350 "H.spy", "Hessian", "Decision variables", "Decision variables",
351 num_decision_variables, num_decision_variables);
352 callbacks.push_back([&](const IterationInfo<Scalar>& info) -> bool {
353 H_spy->add(info.H);
354 return false;
355 });
356 }
357#endif
358
359 // Invoke Newton solver
360 status = newton<Scalar>(NewtonMatrixCallbacks<Scalar>{
361 [&](const DenseVector& x) -> Scalar {
362 x_ad.set_value(x);
363 return f.value();
364 },
365 [&](const DenseVector& x) -> SparseVector {
366 x_ad.set_value(x);
367 return g.value();
368 },
369 [&](const DenseVector& x) -> SparseMatrix {
370 x_ad.set_value(x);
371 return H.value();
372 }},
373 callbacks, options, x);
374 } else if (m_inequality_constraints.empty()) {
375 if (options.diagnostics) {
376 slp::println("\nInvoking SQP solver\n");
377 }
378
379 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
380
381 // Set up equality constraint Jacobian autodiff
382 ad_setup_profilers.emplace_back(" ↳ ∂cₑ/∂x").start();
383 Jacobian A_e{c_e_ad, x_ad};
384 ad_setup_profilers.back().stop();
385
386 // Set up Lagrangian
387 VariableMatrix<Scalar> y_ad(num_equality_constraints);
388 Variable L = f - y_ad.T() * c_e_ad;
389
390 // Set up Lagrangian Hessian autodiff
391 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL").start();
392 Hessian<Scalar, Eigen::Lower> H{L, x_ad};
393 ad_setup_profilers.back().stop();
394
395 ad_setup_profilers[0].stop();
396
397#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
398 // Sparsity pattern files written when spy flag is set
399 std::unique_ptr<Spy<Scalar>> H_spy;
400 std::unique_ptr<Spy<Scalar>> A_e_spy;
401
402 if (spy) {
403 H_spy = std::make_unique<Spy<Scalar>>(
404 "H.spy", "Hessian", "Decision variables", "Decision variables",
405 num_decision_variables, num_decision_variables);
406 A_e_spy = std::make_unique<Spy<Scalar>>(
407 "A_e.spy", "Equality constraint Jacobian", "Constraints",
408 "Decision variables", num_equality_constraints,
409 num_decision_variables);
410 callbacks.push_back([&](const IterationInfo<Scalar>& info) -> bool {
411 H_spy->add(info.H);
412 A_e_spy->add(info.A_e);
413 return false;
414 });
415 }
416#endif
417
418 // Invoke SQP solver
419 status = sqp<Scalar>(
420 SQPMatrixCallbacks<Scalar>{
421 [&](const DenseVector& x) -> Scalar {
422 x_ad.set_value(x);
423 return f.value();
424 },
425 [&](const DenseVector& x) -> SparseVector {
426 x_ad.set_value(x);
427 return g.value();
428 },
429 [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
430 x_ad.set_value(x);
431 y_ad.set_value(y);
432 return H.value();
433 },
434 [&](const DenseVector& x) -> DenseVector {
435 x_ad.set_value(x);
436 return c_e_ad.value();
437 },
438 [&](const DenseVector& x) -> SparseMatrix {
439 x_ad.set_value(x);
440 return A_e.value();
441 }},
442 callbacks, options, x);
443 } else {
444 if (options.diagnostics) {
445 slp::println("\nInvoking IPM solver...\n");
446 }
447
448 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
449 VariableMatrix<Scalar> c_i_ad{m_inequality_constraints};
450
451 // Set up equality constraint Jacobian autodiff
452 ad_setup_profilers.emplace_back(" ↳ ∂cₑ/∂x").start();
453 Jacobian A_e{c_e_ad, x_ad};
454 ad_setup_profilers.back().stop();
455
456 // Set up inequality constraint Jacobian autodiff
457 ad_setup_profilers.emplace_back(" ↳ ∂cᵢ/∂x").start();
458 Jacobian A_i{c_i_ad, x_ad};
459 ad_setup_profilers.back().stop();
460
461 // Set up Lagrangian
462 VariableMatrix<Scalar> y_ad(num_equality_constraints);
463 VariableMatrix<Scalar> z_ad(num_inequality_constraints);
464 Variable L = f - y_ad.T() * c_e_ad - z_ad.T() * c_i_ad;
465
466 // Set up Lagrangian Hessian autodiff
467 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL").start();
468 Hessian<Scalar, Eigen::Lower> H{L, x_ad};
469 ad_setup_profilers.back().stop();
470
471 ad_setup_profilers[0].stop();
472
473#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
474 // Sparsity pattern files written when spy flag is set
475 std::unique_ptr<Spy<Scalar>> H_spy;
476 std::unique_ptr<Spy<Scalar>> A_e_spy;
477 std::unique_ptr<Spy<Scalar>> A_i_spy;
478
479 if (spy) {
480 H_spy = std::make_unique<Spy<Scalar>>(
481 "H.spy", "Hessian", "Decision variables", "Decision variables",
482 num_decision_variables, num_decision_variables);
483 A_e_spy = std::make_unique<Spy<Scalar>>(
484 "A_e.spy", "Equality constraint Jacobian", "Constraints",
485 "Decision variables", num_equality_constraints,
486 num_decision_variables);
487 A_i_spy = std::make_unique<Spy<Scalar>>(
488 "A_i.spy", "Inequality constraint Jacobian", "Constraints",
489 "Decision variables", num_inequality_constraints,
490 num_decision_variables);
491 callbacks.push_back([&](const IterationInfo<Scalar>& info) -> bool {
492 H_spy->add(info.H);
493 A_e_spy->add(info.A_e);
494 A_i_spy->add(info.A_i);
495 return false;
496 });
497 }
498#endif
499
500 const auto [bound_constraint_mask, bounds, conflicting_bound_indices] =
501 get_bounds<Scalar>(m_decision_variables, m_inequality_constraints,
502 A_i.value());
503 if (!conflicting_bound_indices.empty()) {
504 if (options.diagnostics) {
506 conflicting_bound_indices);
507 }
509 }
510
511#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
513#endif
514 // Invoke interior-point method solver
515 status = interior_point<Scalar>(
516 InteriorPointMatrixCallbacks<Scalar>{
517 [&](const DenseVector& x) -> Scalar {
518 x_ad.set_value(x);
519 return f.value();
520 },
521 [&](const DenseVector& x) -> SparseVector {
522 x_ad.set_value(x);
523 return g.value();
524 },
525 [&](const DenseVector& x, const DenseVector& y,
526 const DenseVector& z) -> SparseMatrix {
527 x_ad.set_value(x);
528 y_ad.set_value(y);
529 z_ad.set_value(z);
530 return H.value();
531 },
532 [&](const DenseVector& x) -> DenseVector {
533 x_ad.set_value(x);
534 return c_e_ad.value();
535 },
536 [&](const DenseVector& x) -> SparseMatrix {
537 x_ad.set_value(x);
538 return A_e.value();
539 },
540 [&](const DenseVector& x) -> DenseVector {
541 x_ad.set_value(x);
542 return c_i_ad.value();
543 },
544 [&](const DenseVector& x) -> SparseMatrix {
545 x_ad.set_value(x);
546 return A_i.value();
547 }},
548 callbacks, options,
549#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
550 bound_constraint_mask,
551#endif
552 x);
553 }
554
555 if (options.diagnostics) {
556 print_autodiff_diagnostics(ad_setup_profilers);
557 slp::println("\nExit: {}", status);
558 }
559
560 // Assign the solution to the original Variable instances
561 VariableMatrix<Scalar>{m_decision_variables}.set_value(x);
562
563 return status;
564 }
565
566 /// Adds a callback to be called at the beginning of each solver iteration.
567 ///
568 /// The callback for this overload should return void.
569 ///
570 /// @param callback The callback.
571 template <typename F>
572 requires requires(F callback, const IterationInfo<Scalar>& info) {
573 { callback(info) } -> std::same_as<void>;
574 }
575 void add_callback(F&& callback) {
576 m_iteration_callbacks.emplace_back(
577 [=, callback =
578 std::forward<F>(callback)](const IterationInfo<Scalar>& info) {
579 callback(info);
580 return false;
581 });
582 }
583
  /// Adds a callback to be called at the beginning of each solver iteration.
  ///
  /// The callback for this overload should return bool.
  ///
  /// @param callback The callback. Returning true from the callback causes the
  /// solver to exit early with the solution it has so far.
  template <typename F>
    requires requires(F callback, const IterationInfo<Scalar>& info) {
      { callback(info) } -> std::same_as<bool>;
    }
  void add_callback(F&& callback) {
    // bool-returning callbacks match the stored std::function signature, so
    // they're stored directly with no adapter
    m_iteration_callbacks.emplace_back(std::forward<F>(callback));
  }
597
  /// Clears the registered callbacks.
  ///
  /// Callbacks registered via add_persistent_callback() are unaffected.
  void clear_callbacks() { m_iteration_callbacks.clear(); }
600
  /// Adds a callback to be called at the beginning of each solver iteration.
  ///
  /// Language bindings should call this in the Problem constructor to register
  /// callbacks that shouldn't be removed by clear_callbacks(). Persistent
  /// callbacks run after non-persistent callbacks.
  ///
  /// @param callback The callback. Returning true from the callback causes the
  /// solver to exit early with the solution it has so far.
  template <typename F>
    requires requires(F callback, const IterationInfo<Scalar>& info) {
      { callback(info) } -> std::same_as<bool>;
    }
  void add_persistent_callback(F&& callback) {
    // Stored in a separate list so clear_callbacks() can't remove it
    m_persistent_iteration_callbacks.emplace_back(std::forward<F>(callback));
  }
616
 private:
  // The list of decision variables, which are the root of the problem's
  // expression tree
  gch::small_vector<Variable<Scalar>> m_decision_variables;

  // The cost function: f(x). Empty if minimize()/maximize() was never called.
  std::optional<Variable<Scalar>> m_f;

  // The list of equality constraints: cₑ(x) = 0
  gch::small_vector<Variable<Scalar>> m_equality_constraints;

  // The list of inequality constraints: cᵢ(x) ≥ 0
  gch::small_vector<Variable<Scalar>> m_inequality_constraints;

  // The iteration callbacks, invoked at the start of each solver iteration.
  // m_iteration_callbacks is emptied by clear_callbacks();
  // m_persistent_iteration_callbacks is not, and its callbacks run last.
  gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
      m_iteration_callbacks;
  gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
      m_persistent_iteration_callbacks;
636
637 void print_exit_conditions([[maybe_unused]] const Options& options) {
638 // Print possible exit conditions
639 slp::println("User-configured exit conditions:");
640 slp::println(" ↳ error below {}", options.tolerance);
641 if (!m_iteration_callbacks.empty() ||
642 !m_persistent_iteration_callbacks.empty()) {
643 slp::println(" ↳ iteration callback requested stop");
644 }
645 if (std::isfinite(options.max_iterations)) {
646 slp::println(" ↳ executed {} iterations", options.max_iterations);
647 }
648 if (std::isfinite(options.timeout.count())) {
649 slp::println(" ↳ {} elapsed", options.timeout);
650 }
651 }
652
  // Prints a diagnostic summary of the problem: the expression type of the
  // cost function and constraints, plus counts of decision variables and of
  // constraints grouped by expression type.
  void print_problem_analysis() {
    // Indexed by ExpressionType's underlying value
    constexpr std::array types{"no", "constant", "linear", "quadratic",
                               "nonlinear"};

    // Print problem structure
    slp::println("\nProblem structure:");
    slp::println(" ↳ {} cost function",
                 types[static_cast<uint8_t>(cost_function_type())]);
    slp::println(" ↳ {} equality constraints",
                 types[static_cast<uint8_t>(equality_constraint_type())]);
    slp::println(" ↳ {} inequality constraints",
                 types[static_cast<uint8_t>(inequality_constraint_type())]);

    // Singular/plural forms are printed separately
    if (m_decision_variables.size() == 1) {
      slp::print("\n1 decision variable\n");
    } else {
      slp::print("\n{} decision variables\n", m_decision_variables.size());
    }

    // Prints a per-expression-type histogram of the given constraint list
    auto print_constraint_types =
        [](const gch::small_vector<Variable<Scalar>>& constraints) {
          // One bucket per ExpressionType value
          std::array<size_t, 5> counts{};
          for (const auto& constraint : constraints) {
            ++counts[static_cast<uint8_t>(constraint.type())];
          }
          for (size_t i = 0; i < counts.size(); ++i) {
            constexpr std::array names{"empty", "constant", "linear",
                                       "quadratic", "nonlinear"};
            const auto& count = counts[i];
            const auto& name = names[i];
            // Only print nonempty buckets
            if (count > 0) {
              slp::println(" ↳ {} {}", count, name);
            }
          }
        };

    // Print constraint types
    if (m_equality_constraints.size() == 1) {
      slp::println("1 equality constraint");
    } else {
      slp::println("{} equality constraints", m_equality_constraints.size());
    }
    print_constraint_types(m_equality_constraints);
    if (m_inequality_constraints.size() == 1) {
      slp::println("1 inequality constraint");
    } else {
      slp::println("{} inequality constraints",
                   m_inequality_constraints.size());
    }
    print_constraint_types(m_inequality_constraints);
  }
704};
705
706extern template class EXPORT_TEMPLATE_DECLARE(SLEIPNIR_DLLEXPORT)
708
709} // namespace slp
#define EXPORT_TEMPLATE_DECLARE(export)
Definition SymbolExports.hpp:94
within a display generated by the Derivative if and wherever such third party notices normally appear The contents of the NOTICE file are for informational purposes only and do not modify the License You may add Your own attribution notices within Derivative Works that You alongside or as an addendum to the NOTICE text from the provided that such additional attribution notices cannot be construed as modifying the License You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for or distribution of Your or for any such Derivative Works as a provided Your and distribution of the Work otherwise complies with the conditions stated in this License Submission of Contributions Unless You explicitly state any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this without any additional terms or conditions Notwithstanding the nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions Trademarks This License does not grant permission to use the trade names
Definition ThirdPartyNotices.txt:234
@ name
Definition base.h:690
This class allows the user to pose a constrained nonlinear optimization problem in natural mathematic...
Definition problem.hpp:67
VariableMatrix< Scalar > symmetric_decision_variable(int rows)
Create a symmetric matrix of decision variables in the optimization problem.
Definition problem.hpp:112
void subject_to(InequalityConstraints< Scalar > &&constraint)
Tells the solver to solve the problem while satisfying the given inequality constraint.
Definition problem.hpp:215
void add_callback(F &&callback)
Adds a callback to be called at the beginning of each solver iteration.
Definition problem.hpp:594
void subject_to(EqualityConstraints< Scalar > &&constraint)
Tells the solver to solve the problem while satisfying the given equality constraint.
Definition problem.hpp:193
ExpressionType equality_constraint_type() const
Returns the type of the highest order equality constraint.
Definition problem.hpp:236
void maximize(Variable< Scalar > &&objective)
Tells the solver to maximize the output of the given objective function.
Definition problem.hpp:173
void add_persistent_callback(F &&callback)
Adds a callback to be called at the beginning of each solver iteration.
Definition problem.hpp:613
void subject_to(const InequalityConstraints< Scalar > &constraint)
Tells the solver to solve the problem while satisfying the given inequality constraint.
Definition problem.hpp:204
ExpressionType inequality_constraint_type() const
Returns the type of the highest order inequality constraint.
Definition problem.hpp:249
void minimize(const Variable< Scalar > &cost)
Tells the solver to minimize the output of the given cost function.
Definition problem.hpp:143
Problem() noexcept=default
Construct the optimization problem.
VariableMatrix< Scalar > decision_variable(int rows, int cols=1)
Create a matrix of decision variables in the optimization problem.
Definition problem.hpp:87
void minimize(Variable< Scalar > &&cost)
Tells the solver to minimize the output of the given cost function.
Definition problem.hpp:152
void clear_callbacks()
Clears the registered callbacks.
Definition problem.hpp:599
void subject_to(const EqualityConstraints< Scalar > &constraint)
Tells the solver to solve the problem while satisfying the given equality constraint.
Definition problem.hpp:182
void maximize(const Variable< Scalar > &objective)
Tells the solver to maximize the output of the given objective function.
Definition problem.hpp:161
void add_callback(F &&callback)
Adds a callback to be called at the beginning of each solver iteration.
Definition problem.hpp:575
ExpressionType cost_function_type() const
Returns the cost function's type.
Definition problem.hpp:225
ExitStatus solve(const Options &options=Options{}, bool spy=false)
Solve the optimization problem.
Definition problem.hpp:267
Variable< Scalar > decision_variable()
Create a decision variable in the optimization problem.
Definition problem.hpp:76
An autodiff variable pointing to an expression node.
Definition variable.hpp:47
ExpressionType type() const
Returns the type of this expression (constant, linear, quadratic, or nonlinear).
Definition variable.hpp:149
A matrix of autodiff variables.
Definition variable_matrix.hpp:33
constexpr auto count() -> int
Definition base.h:1081
wpi::util::SmallVector< T > small_vector
Definition small_vector.hpp:10
static constexpr empty_t empty
Designates an uninitialized VariableMatrix.
Definition empty.hpp:11
Definition expression_graph.hpp:11
void project_onto_bounds(Eigen::DenseBase< Derived > &x, std::span< const std::pair< typename Eigen::DenseBase< Derived >::Scalar, typename Eigen::DenseBase< Derived >::Scalar > > decision_variable_indices_to_bounds, const typename Eigen::DenseBase< Derived >::Scalar κ_1=typename Eigen::DenseBase< Derived >::Scalar(1e-2), const typename Eigen::DenseBase< Derived >::Scalar κ_2=typename Eigen::DenseBase< Derived >::Scalar(1e-2))
Projects the decision variables onto the given bounds, while ensuring some configurable distance from...
Definition bounds.hpp:195
ExpressionType
Expression type.
Definition expression_type.hpp:16
@ CONSTANT
The expression is a constant.
Definition expression_type.hpp:20
@ NONE
There is no expression.
Definition expression_type.hpp:18
ExitStatus
Solver exit status. Negative values indicate failure.
Definition exit_status.hpp:14
@ GLOBALLY_INFEASIBLE
The problem setup frontend determined the problem to have an empty feasible region.
Definition exit_status.hpp:25
@ SUCCESS
Solved the problem to the desired tolerance.
Definition exit_status.hpp:16
auto bounds(L &&l, X &&x, U &&u)
Helper function for creating bound constraints.
Definition variable.hpp:910
void print_bound_constraint_global_infeasibility_error(const std::span< const std::pair< Eigen::Index, Eigen::Index > > conflicting_lower_upper_bound_indices)
Definition print_diagnostics.hpp:153
Variable(T< Scalar >) -> Variable< Scalar >
void println(fmt::format_string< T... > fmt, T &&... args)
Wrapper around fmt::println() that squelches write failure exceptions.
Definition print.hpp:37
Bounds< Scalar > get_bounds(std::span< Variable< Scalar > > decision_variables, std::span< Variable< Scalar > > inequality_constraints, const Eigen::SparseMatrix< Scalar, Eigen::RowMajor > &A_i)
A "bound constraint" is any linear constraint in one scalar variable.
Definition bounds.hpp:55
void print(fmt::format_string< T... > fmt, T &&... args)
Wrapper around fmt::print() that squelches write failure exceptions.
Definition print.hpp:19
void print_autodiff_diagnostics(const gch::small_vector< SetupProfiler > &setup_profilers)
Prints autodiff diagnostics.
Definition print_diagnostics.hpp:323
A vector of equality constraints of the form cₑ(x) = 0.
Definition variable.hpp:683
gch::small_vector< Variable< Scalar > > constraints
A vector of scalar equality constraints.
Definition variable.hpp:685
A vector of inequality constraints of the form cᵢ(x) ≥ 0.
Definition variable.hpp:740
gch::small_vector< Variable< Scalar > > constraints
A vector of scalar inequality constraints.
Definition variable.hpp:742
Solver iteration information exposed to an iteration callback.
Definition iteration_info.hpp:14
Solver options.
Definition options.hpp:13
#define SLEIPNIR_DLLEXPORT
Definition symbol_exports.hpp:34