using Result   = decltype(std::apply(f, x));
using Scalar   = ::smooth::Scalar<Result>;
using AdScalar = autodiff::HigherOrderDual<K, Scalar>;
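// evaluate f at x; this value is returned and also serves as the base point
// for expressing f in local (tangent-space) coordinates below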
Result F = std::apply(f, x);
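// compile-time sizes (possibly Eigen::Dynamic) and run-time degrees of freedom
// of the argument tuple x and of the result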
static constexpr auto Nx = wrt_Dof<decltype(x)>();
static constexpr auto Ny = Dof<Result>;
const Eigen::Index nx = std::apply([](auto &&... args) { return (dof(args) + ...); }, x);
const Eigen::Index ny = dof(F);
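// cast the argument to the autodiff scalar type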
const auto x_ad = wrt_cast<AdScalar>(x);
// F_ad (f(x) cast to AdScalar) is referenced below but its declaration is not
// part of this excerpt; a likely form is:
const CastT<AdScalar, Result> F_ad = cast<AdScalar>(F);
if constexpr (K == 1) {
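  // zero-valued tangent element; differentiation is performed with respect to it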
  Matrix<AdScalar, Nx, 1> a_ad = Matrix<AdScalar, Nx, 1>::Zero(nx);
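  // f expressed in local coordinates around x: var -> f(x (+) var) (-) f(x)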
  const auto f_ad = [&](Matrix<AdScalar, Nx, 1> & var) -> Matrix<AdScalar, Ny, 1> {
    return rminus<CastT<AdScalar, Result>>(std::apply(f, wrt_rplus(x_ad, var)), F_ad);
  };
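  // allocate the ny x nx Jacobian and fill it with forward-mode autodiff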
  Matrix<Scalar, Ny, Nx> J(ny, nx);

  J = autodiff::jacobian(f_ad, autodiff::wrt(a_ad), autodiff::at(a_ad));
  return std::make_pair(std::move(F), std::move(J));
}
else if constexpr (K == 2) {
  // f in local coordinates around x, perturbed twice so that mixed second
  // derivatives can be extracted
  const auto f_ad =
    [&f, &x_ad, &F_ad](Matrix<AdScalar, Nx, 1> & var1, Matrix<AdScalar, Nx, 1> & var2) -> Matrix<AdScalar, Ny, 1> {
      return rminus(std::apply(f, wrt_rplus(wrt_rplus(x_ad, var1), var2)), F_ad);
    };
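  // storage: J is the ny x nx Jacobian; H stacks the ny Hessian blocks
  // horizontally, with H(j, k * nx + i) holding the second derivative of
  // output k with respect to inputs i and j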
  Matrix<Scalar, Ny, Nx> J(ny, nx);
  Matrix<Scalar, Nx, std::min(Nx, Ny) == -1 ? -1 : Nx * Ny> H(nx, nx * ny);
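  // two independent zero-valued perturbations, one per differentiation order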
  Matrix<AdScalar, Nx, 1> a_ad1 = Matrix<AdScalar, Nx, 1>::Zero(nx);
  Matrix<AdScalar, Nx, 1> a_ad2 = Matrix<AdScalar, Nx, 1>::Zero(nx);

  const auto a_wrt1 = autodiff::wrt(a_ad1);
  const auto a_wrt2 = autodiff::wrt(a_ad2);
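  // iterate over all coordinate pairs (i, j); a single evaluation with
  // second-order duals yields dF_k/dx_i and d2F_k/(dx_i dx_j) for every
  // output component k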
  autodiff::detail::ForEachWrtVar(a_wrt1, [&](auto && i, auto && xi) constexpr {
    autodiff::detail::ForEachWrtVar(a_wrt2, [&](auto && j, auto && xj) constexpr {
      const auto u = autodiff::detail::eval(f_ad, autodiff::at(a_ad1, a_ad2), autodiff::wrt(xi, xj));
      for (auto k = 0u; k < ny; ++k) {
        J(k, i)          = static_cast<double>(autodiff::detail::derivative<1>(u[k]));
        H(j, k * nx + i) = autodiff::detail::derivative<2>(u[k]);
      }
    });
  });

  return std::make_tuple(std::move(F), std::move(J), std::move(H));
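  // The K == 2 branch thus returns the value, the ny x nx Jacobian, and the
  // nx x (nx * ny) block-stacked Hessians. A usage sketch (hypothetical names,
  // assuming this body sits in a template of the form dr_autodiff<K>(f, x)
  // where x is a tuple of Manifold-valued arguments) might read:
  //
  //   const auto [fval, jac, hess] = dr_autodiff<2>(f, std::forward_as_tuple(g));
  //
  // with g an argument of f; the exact wrapper name and argument packing are
  // not shown in this excerpt.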