diff --git a/MParT/Distributions/GaussianSamplerDensity.h b/MParT/Distributions/GaussianSamplerDensity.h
index bad4a8f5..b7add81e 100644
--- a/MParT/Distributions/GaussianSamplerDensity.h
+++ b/MParT/Distributions/GaussianSamplerDensity.h
@@ -54,6 +54,8 @@ class GaussianSamplerDensity: public SampleGenerator, public Densit
     unsigned int Dim() const override { return dim_; };
 
+    bool IsStandardNormal() const { return idCov_ && mean_.size() == 0; }
+
     protected:
     using GeneratorType = typename SampleGenerator::PoolType::generator_type;
     using SampleGenerator::rand_pool;
diff --git a/MParT/MapObjective.h b/MParT/MapObjective.h
index 86ce36e3..5938a82d 100644
--- a/MParT/MapObjective.h
+++ b/MParT/MapObjective.h
@@ -178,12 +178,44 @@ class KLObjective: public MapObjective {
     double ObjectiveImpl(StridedMatrix data, std::shared_ptr> map) const override;
     void CoeffGradImpl(StridedMatrix data, StridedVector grad, std::shared_ptr> map) const override;
     unsigned int MapOutputDim() const override {return density_->Dim();}
-    private:
+
     /**
      * @brief Density \f$\mu\f$ to calculate the KL with respect to (i.e. \f$D(\cdot||\mu)\f$ )
      *
      */
-    std::shared_ptr> density_;
+    const std::shared_ptr> density_;
+};
+
+template
+class FastGaussianReverseKLObjective: public MapObjective {
+    public:
+    FastGaussianReverseKLObjective(StridedMatrix train, unsigned int numCoeffs);
+    FastGaussianReverseKLObjective(StridedMatrix train, StridedMatrix test, unsigned int numCoeffs);
+
+    double ObjectivePlusCoeffGradImpl(StridedMatrix data, StridedVector grad, std::shared_ptr> map) const override;
+
+    double ObjectiveImpl(StridedMatrix data, std::shared_ptr> map) const override;
+
+    void CoeffGradImpl(StridedMatrix data, StridedVector grad, std::shared_ptr> map) const override;
+
+    private:
+    using ExecSpace = typename MemoryToExecution::Space;
+    enum class ObjectiveType {Eval = 0, Grad = 1, EvalGrad = 2};
+
+    template
+    void FillSpaces(std::shared_ptr> map, StridedMatrix data) const;
+
+    template
+    double CommonEval(StridedMatrix data, StridedVector grad, std::shared_ptr> map) const;
+
+    template
+    void ClearSpaces() const;
+
+    mutable Kokkos::View eval_space_;
+    mutable Kokkos::View logdet_space_;
+
+    mutable Kokkos::View grad_space_;
+    mutable Kokkos::View logdet_grad_space_;
 };
 
 namespace ObjectiveFactory {
diff --git a/MParT/TrainMap.h b/MParT/TrainMap.h
index b54758df..e538aec1 100644
--- a/MParT/TrainMap.h
+++ b/MParT/TrainMap.h
@@ -5,6 +5,7 @@
 #include 
 #include 
 #include "MParT/ConditionalMapBase.h"
+#include "MParT/TriangularMap.h"
 #include "MParT/MapObjective.h"
 
 namespace mpart {
diff --git a/MParT/TriangularMap.h b/MParT/TriangularMap.h
index 27d96877..ef79d6a1 100644
--- a/MParT/TriangularMap.h
+++ b/MParT/TriangularMap.h
@@ -64,6 +64,7 @@ class TriangularMap : public ConditionalMapBase{
     void WrapCoeffs(Kokkos::View coeffs) override;
 
     virtual std::shared_ptr> GetComponent(unsigned int i){ return comps_.at(i);}
+    unsigned int NumComponents() const { return comps_.size();}
 
     /** @brief Computes the log determinant of the Jacobian matrix of this map.
diff --git a/MParT/Utilities/ArrayConversions.h b/MParT/Utilities/ArrayConversions.h
index cc9cde94..24be2ac1 100644
--- a/MParT/Utilities/ArrayConversions.h
+++ b/MParT/Utilities/ArrayConversions.h
@@ -148,9 +148,10 @@ namespace mpart{
     template
     std::vector::type> KokkosToStd(Kokkos::View const& view)
     {
+        auto view_copy = Kokkos::create_mirror_view_and_copy(Kokkos::HostSpace(), view);
         std::vector::type> output(view.extent(0));
         for(unsigned int i=0; i
::LogDensityImpl(StridedMatrix
     diff ("diff", M, N);
     if(mean_.extent(0) == 0){
-        Kokkos::parallel_for(policy, KOKKOS_CLASS_LAMBDA(const int& j, const int& i) {
-            diff(i,j) = pts(i,j);
-        });
+        Kokkos::deep_copy(diff, pts);
     }
     else {
         Kokkos::parallel_for(policy, KOKKOS_CLASS_LAMBDA(const int& j, const int& i) {
@@ -75,7 +73,17 @@ void GaussianSamplerDensity::LogDensityInputGradImpl(StridedMatrix<
     }
 
     if(!idCov_) {
-        covChol_.solveInPlace(output);
+
+        if(output.stride_0()==1){
+            covChol_.solveInPlace(output);
+        }else{
+            StridedMatrix outLeft;
+            outLeft = Kokkos::View("OutLeft", output.extent(0), output.extent(1));
+            Kokkos::deep_copy(outLeft, output);
+            covChol_.solveInPlace(outLeft);
+            Kokkos::deep_copy(output, outLeft);
+        }
+
     }
 }
diff --git a/src/MapObjective.cpp b/src/MapObjective.cpp
index 50431688..0946a0b3 100644
--- a/src/MapObjective.cpp
+++ b/src/MapObjective.cpp
@@ -6,6 +6,7 @@
 template
 double MapObjective::operator()(unsigned int n, const double* coeffs, double* grad, std::shared_ptr> map) {
     Kokkos::View coeffView = ToConstKokkos(coeffs, n);
+    if (grad == nullptr) return ObjectiveImpl(train_, map);
     StridedVector gradView = ToKokkos(grad, n);
     map->SetCoeffs(coeffView);
     return ObjectivePlusCoeffGradImpl(train_, gradView, map);
@@ -89,6 +90,149 @@ double KLObjective::ObjectivePlusCoeffGradImpl(StridedMatrix
+FastGaussianReverseKLObjective::FastGaussianReverseKLObjective(StridedMatrix train, StridedMatrix test, unsigned int numCoeffs) : MapObjective(train, test) {
+    unsigned int N = std::max(train.extent(1), test.extent(1));
+    eval_space_ = Kokkos::View("eval_space", 1, N);
+    logdet_space_ = Kokkos::View("logdet_space", N);
+    grad_space_ = Kokkos::View("grad_space", numCoeffs, N);
+    logdet_grad_space_ = Kokkos::View("logdet_grad_space", numCoeffs, N);
+}
+
+template
+FastGaussianReverseKLObjective::FastGaussianReverseKLObjective(StridedMatrix train, unsigned int numCoeffs) : FastGaussianReverseKLObjective(train, Kokkos::View(), numCoeffs) {}
+
+template
+template
+void FastGaussianReverseKLObjective::FillSpaces(std::shared_ptr> map, StridedMatrix data) const {
+    constexpr ObjectiveType Type = static_cast(Type_idx);
+    unsigned int N_points = data.extent(1);
+    if(N_points > eval_space_.extent(1)){
+        std::stringstream ss;
+        ss << "FastGaussianReverseKLObjective: Not enough space allocated for evaluation!"
+            << "Need " << N_points << " points, storing " << eval_space_.extent(1) << " points.";
+        throw std::invalid_argument(ss.str().c_str());
+    }
+    StridedMatrix eval_space_view = Kokkos::subview(eval_space_, Kokkos::ALL(), Kokkos::make_pair(0u, N_points));
+    map->EvaluateImpl(data, eval_space_view);
+    if constexpr((Type == ObjectiveType::Eval) || (Type == ObjectiveType::EvalGrad)) {
+        StridedVector logdet_space_view = Kokkos::subview(logdet_space_, Kokkos::make_pair(0u, N_points));
+        map->LogDeterminantImpl(data, logdet_space_view);
+    }
+    if constexpr((Type == ObjectiveType::Grad) || (Type == ObjectiveType::EvalGrad)) {
+        StridedMatrix grad_space_view = Kokkos::subview(grad_space_, Kokkos::ALL(), Kokkos::make_pair(0u, N_points));
+        StridedMatrix logdet_grad_space_view = Kokkos::subview(logdet_grad_space_, Kokkos::ALL(), Kokkos::make_pair(0u, N_points));
+        Kokkos::fence();
+        map->CoeffGradImpl(data, eval_space_view, grad_space_view);
+        map->LogDeterminantCoeffGradImpl(data, logdet_grad_space_view);
+    }
+    Kokkos::fence();
+}
+
+template
+template
+void FastGaussianReverseKLObjective::ClearSpaces() const {
+    constexpr ObjectiveType Type = static_cast(Type_idx);
+    if constexpr((Type == ObjectiveType::Eval) || (Type == ObjectiveType::EvalGrad)) {
+        Kokkos::deep_copy(eval_space_, 0.);
+        Kokkos::deep_copy(logdet_space_, 0.);
+    }
+    if constexpr((Type == ObjectiveType::Grad) || (Type == ObjectiveType::EvalGrad)) {
+        Kokkos::deep_copy(grad_space_, 0.);
+        Kokkos::deep_copy(logdet_grad_space_, 0.);
+    }
+}
+
+template
+class EvaluationFunctor {
+    public:
+    EvaluationFunctor(const EvalSpaceType& eval_space_view, const LogDetSpaceType& logdet_space_view): eval_space_view_(eval_space_view), logdet_space_view_(logdet_space_view){}
+    KOKKOS_FUNCTION void operator()(const unsigned int j, double& loss) const {
+        loss += 0.5*eval_space_view_(0, j)*eval_space_view_(0, j) - logdet_space_view_(j);
+    }
+    private:
+    const EvalSpaceType eval_space_view_;
+    const LogDetSpaceType logdet_space_view_;
+};
+
+template
+class GradientFunctor {
+    using team_handle=typename Kokkos::TeamPolicy::member_type;
+    public:
+    GradientFunctor(KLGradSpaceType& kl_grad, unsigned int N_points, const GradSpaceType& grad_space_view, const GradLogDetSpaceType& logdet_grad_space_view):
+        kl_grad_(kl_grad), N_points_(N_points), grad_space_view_(grad_space_view), logdet_grad_space_view_(logdet_grad_space_view) {
+    }
+    KOKKOS_FUNCTION void operator()(const team_handle& team) const {
+        int d = team.league_rank();
+        double grad_d_ = 0.;
+        Kokkos::parallel_reduce(Kokkos::TeamThreadRange(team, N_points_), [&](const unsigned int j, double& grad_d_tmp) {
+            grad_d_tmp += grad_space_view_(d, j) - logdet_grad_space_view_(d, j);
+        }, grad_d_);
+        kl_grad_(d) = grad_d_/N_points_;
+    }
+
+    private:
+    mutable KLGradSpaceType kl_grad_;
+    const unsigned int N_points_;
+    const GradSpaceType grad_space_view_;
+    const GradLogDetSpaceType logdet_grad_space_view_;
+};
+
+template
+template
+double FastGaussianReverseKLObjective::CommonEval(StridedMatrix points, StridedVector kl_grad, std::shared_ptr> map) const {
+    if(map->outputDim != 1){
+        throw std::invalid_argument("FastGaussianReverseKLObjective: Map output dimension must be 1! Found dimension " + std::to_string(map->outputDim) + ".");
+    }
+    constexpr ObjectiveType Type = static_cast(Type_idx);
+    unsigned int N_points = points.extent(1);
+    unsigned int numCoeffs = map->numCoeffs;
+    if(Type != ObjectiveType::Eval && numCoeffs != kl_grad.extent(0)){
+        std::stringstream ss;
+        ss << "FastGaussianReverseKLObjective: Gradient vector has incorrect size!"
+            << "Given size " << kl_grad.extent(0) << ", expected size " << numCoeffs
+            << ".";
+        throw std::invalid_argument(ss.str().c_str());
+    }
+    FillSpaces(map, points);
+
+    double kl_loss = 0.;
+    if constexpr((Type == ObjectiveType::Eval) || (Type == ObjectiveType::EvalGrad)) {
+        Kokkos::RangePolicy eval_policy(0, N_points);
+        EvaluationFunctor eval_kernel(eval_space_, logdet_space_);
+        Kokkos::parallel_reduce("FastGaussianReverseKL Evaluate", eval_policy, eval_kernel, kl_loss);
+        kl_loss /= N_points;
+    }
+    if constexpr((Type == ObjectiveType::Grad) || (Type == ObjectiveType::EvalGrad)) {
+        Kokkos::TeamPolicy grad_policy(numCoeffs, Kokkos::AUTO());
+        GradientFunctor gradient_functor(kl_grad, N_points, grad_space_, logdet_grad_space_);
+
+        Kokkos::parallel_for("FastGaussianReverseKL Gradient", grad_policy, gradient_functor);
+        Kokkos::fence();
+    }
+    ClearSpaces();
+    return kl_loss;
+}
+
+template
+double FastGaussianReverseKLObjective::ObjectivePlusCoeffGradImpl(StridedMatrix data, StridedVector grad, std::shared_ptr> map) const {
+    constexpr unsigned int EvalGrad_idx = static_cast(ObjectiveType::EvalGrad);
+    return CommonEval(data, grad, map);
+}
+
+template
+double FastGaussianReverseKLObjective::ObjectiveImpl(StridedMatrix data, std::shared_ptr> map) const {
+    Kokkos::View grad_holder;
+    constexpr unsigned int Eval_idx = static_cast(ObjectiveType::Eval);
+    return CommonEval(data, grad_holder, map);
+}
+
+template
+void FastGaussianReverseKLObjective::CoeffGradImpl(StridedMatrix data, StridedVector grad, std::shared_ptr> map) const {
+    constexpr unsigned int Grad_idx = static_cast(ObjectiveType::Grad);
+    CommonEval(data, grad, map);
+}
+
 template
 double KLObjective::ObjectiveImpl(StridedMatrix data, std::shared_ptr> map) const {
     unsigned int N_samps = data.extent(1);
@@ -107,7 +251,7 @@ void KLObjective::CoeffGradImpl(StridedMatrix::Space;
     unsigned int N_samps = data.extent(1);
     unsigned int grad_dim = grad.extent(0);
-    PullbackDensity pullback {map, density_};
+    PullbackDensity pullback {map, density_};
     StridedMatrix densityGradX = pullback.LogDensityCoeffGrad(data);
     double scale = -1.0/((double) N_samps);
@@ -131,11 +275,13 @@ void KLObjective::CoeffGradImpl(StridedMatrix;
 template class mpart::KLObjective;
+template class mpart::FastGaussianReverseKLObjective;
 template std::shared_ptr> mpart::ObjectiveFactory::CreateGaussianKLObjective(StridedMatrix, unsigned int);
 template std::shared_ptr> mpart::ObjectiveFactory::CreateGaussianKLObjective(StridedMatrix, StridedMatrix, unsigned int);
 #if defined(MPART_ENABLE_GPU)
     template class mpart::MapObjective;
     template class mpart::KLObjective;
+    template class mpart::FastGaussianReverseKLObjective;
     template std::shared_ptr> mpart::ObjectiveFactory::CreateGaussianKLObjective(StridedMatrix, unsigned int);
     template std::shared_ptr> mpart::ObjectiveFactory::CreateGaussianKLObjective(StridedMatrix, StridedMatrix, unsigned int);
 #endif
diff --git a/src/TrainMap.cpp b/src/TrainMap.cpp
index af74a58b..92d79ccc 100644
--- a/src/TrainMap.cpp
+++ b/src/TrainMap.cpp
@@ -56,25 +56,136 @@ nlopt::opt SetupOptimization(unsigned int dim, TrainOptions options) {
     return opt;
 }
 
-template<>
-double mpart::TrainMap(std::shared_ptr> map, std::shared_ptr> objective, TrainOptions options) {
+template
+bool isMapObjectiveStandardGaussianKL(std::shared_ptr> objective) {
+    std::shared_ptr> klObjective = std::dynamic_pointer_cast>(objective);
+    if(klObjective == nullptr) return false;
+    std::shared_ptr> density = klObjective->density_;
+    std::shared_ptr> gaussianDensity = std::dynamic_pointer_cast>(density);
+    return gaussianDensity != nullptr && gaussianDensity->IsStandardNormal();
+}
+
+template
+class NLOptFunctor {
+    public:
+    NLOptFunctor(std::shared_ptr> objective, std::shared_ptr> map): objective_(objective), map_(map) {
+        unsigned int numCoeffs = map->numCoeffs;
+        coeff_d_ = Kokkos::View("coeff", numCoeffs);
+        grad_d_ = Kokkos::View("grad", numCoeffs);
+    }
+
+    double operator()(unsigned n, const double* coeff_ptr, double* grad_ptr) {
+        assert(n == coeff_d_.extent(0));
+        Kokkos::View coeff = ToKokkos(coeff_ptr, n);
+        Kokkos::View grad = ToKokkos(grad_ptr, n);
+        Kokkos::deep_copy(coeff_d_, coeff);
+        double error;
+        if(grad_ptr != nullptr) {
+            error = (*objective_)(n, coeff_d_.data(), grad_d_.data(), map_);
+            Kokkos::deep_copy(grad, grad_d_);
+            Kokkos::deep_copy(grad_d_, 0);
+        } else {
+            error = (*objective_)(n, coeff_d_.data(), nullptr, map_);
+        }
+        return error;
+    }
+
+    private:
+    Kokkos::View coeff_d_;
+    Kokkos::View grad_d_;
+    std::shared_ptr> objective_;
+    std::shared_ptr> map_;
+};
+
+template
+std::function CreateNLOptObjective(std::shared_ptr> objective, std::shared_ptr> map) {
+    if constexpr(std::is_same_v) {
+        return std::bind(&MapObjective::operator(), objective, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3, map);
+    } else {
+        NLOptFunctor functor(objective, map);
+        return functor;
+    }
+}
+
+template
+double TrainComponent_StandardNormalDensity(std::shared_ptr> component, std::shared_ptr> objective, TrainOptions options) {
+    if(component->outputDim != 1) {
+        throw std::runtime_error("TrainComponent_StandardNormalDensity: Component must be scalar-valued. Has output dimension " + std::to_string(component->outputDim) + ".");
+    }
+    std::shared_ptr> mapObjective = std::dynamic_pointer_cast>(objective);
+    assert(mapObjective != nullptr);
+    std::function functor = CreateNLOptObjective(mapObjective, component);
+
+    // Get the initial guess at the coefficients
+    std::vector compCoeffsStd = KokkosToStd(component->Coeffs());
+
+    // Optimize the map coefficients using NLopt
+    nlopt::opt opt = SetupOptimization(component->numCoeffs, options);
+    opt.set_min_objective(functor_wrapper, reinterpret_cast(&functor));
+
+    double error = 0.;
+
+    nlopt::result res = opt.optimize(compCoeffsStd, error);
+    Kokkos::View compCoeffsView = VecToKokkos(compCoeffsStd);
+    component->SetCoeffs(compCoeffsView);
+
+    if(options.verbose) {
+        if(res < 0) {
+            std::cerr << "WARNING: Optimization failed: " << MPART_NLOPT_FAILURE_CODES[-res] << std::endl;
+        } else {
+            std::cout << "Optimization result: " << MPART_NLOPT_SUCCESS_CODES[res] << std::endl;
+        }
+        std::cout << "Optimization error: " << error << "\n"
+                  << "Optimization evaluations: " << opt.get_numevals() << std::endl;
+    }
+    return error;
+}
+
+template
+double TrainMap_Triangular_StandardNormalDensity(std::shared_ptr> map, std::shared_ptr> objective, TrainOptions options) {
+    if(options.verbose) {
+        std::cout << "Detected standard normal reference density with Triangular Map" << std::endl;
+    }
+    double total_error = 0.;
+    StridedMatrix trainSamples = objective->GetTrain();
+    // Assume each component is a scalar-valued function
+    for(int i = 0; i < map->NumComponents(); i++) {
+        std::shared_ptr> component = map->GetComponent(i);
+        // Create a new objective for each component by slicing the KL objective
+        StridedMatrix trainSlice = Kokkos::subview(trainSamples, Kokkos::make_pair(0u, component->inputDim), Kokkos::ALL());
+        std::shared_ptr> componentObjective = std::make_shared>(trainSlice, component->numCoeffs);
+        if(options.verbose) std::cout << "Training component " << i << "..." << std::endl;
+        total_error += TrainComponent_StandardNormalDensity(component, componentObjective, options);
+        if(options.verbose) {
+            std::cout << "Component " << i << " trained.\n" << std::endl;
+        }
+    }
+    return total_error;
+}
+
+template
+double mpart::TrainMap(std::shared_ptr> map, std::shared_ptr> objective, TrainOptions options) {
     if(map->Coeffs().extent(0) == 0) {
         if(options.verbose) {
             std::cout << "TrainMap: Initializing map coeffs to 1." << std::endl;
         }
         Kokkos::View coeffs ("Default coeffs", map->numCoeffs);
-        Kokkos::RangePolicy::Space> policy(0,map->numCoeffs);
-        Kokkos::parallel_for("Setting default coeff val", policy, KOKKOS_LAMBDA(const unsigned int i){
-            coeffs(i) = 1.;
-        });
-        Kokkos::View constCoeffs = coeffs;
+        Kokkos::deep_copy(coeffs, 1.);
+        Kokkos::View constCoeffs = coeffs;
         map->SetCoeffs(constCoeffs);
     }
+    std::shared_ptr> tri_map = std::dynamic_pointer_cast>(map);
+    bool isMapTriangular = tri_map != nullptr;
+    if(isMapObjectiveStandardGaussianKL(objective) && isMapTriangular) {
+        std::shared_ptr> klObjective = std::dynamic_pointer_cast>(objective);
+        return TrainMap_Triangular_StandardNormalDensity(tri_map, klObjective, options);
+    }
+
     nlopt::opt opt = SetupOptimization(map->numCoeffs, options);
 
     // Since objective is (rightfully) separate from the map, we use std::bind to create a functor
     // from objective::operator() that keeps the map argument held.
-    std::function functor = std::bind(&MapObjective::operator(), objective, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3, map);
+    std::function functor = CreateNLOptObjective(objective, map);
     opt.set_min_objective(functor_wrapper, reinterpret_cast(&functor));
 
     // Get the initial guess at the coefficients
@@ -103,3 +214,8 @@ double mpart::TrainMap(std::shared_ptr> ma
     }
     return error;
 }
+
+template double mpart::TrainMap(std::shared_ptr> map, std::shared_ptr> objective, TrainOptions options);
+#if defined(MPART_ENABLE_GPU)
+template double mpart::TrainMap(std::shared_ptr> map, std::shared_ptr> objective, TrainOptions options);
+#endif
\ No newline at end of file
diff --git a/src/Utilities/LinearAlgebra.cpp b/src/Utilities/LinearAlgebra.cpp
index 9543ab96..ca36eee2 100644
--- a/src/Utilities/LinearAlgebra.cpp
+++ b/src/Utilities/LinearAlgebra.cpp
@@ -86,8 +86,7 @@ template<>
 void Cholesky::solveInPlace(Kokkos::View x) {
     auto eigX = KokkosToMat(x);
-    cholSolver_->matrixL().solveInPlace(eigX);
-    cholSolver_->matrixU().solveInPlace(eigX);
+    cholSolver_->solveInPlace(eigX);
 }
 
 template<>
diff --git a/tests/Distributions/Test_Distributions_Common.cpp b/tests/Distributions/Test_Distributions_Common.cpp
index fec92d61..66bad0b6 100644
--- a/tests/Distributions/Test_Distributions_Common.cpp
+++ b/tests/Distributions/Test_Distributions_Common.cpp
@@ -1,4 +1,5 @@
 #include "Test_Distributions_Common.h"
+using namespace Catch::Matchers;
 
 void TestStandardNormalSamples(StridedMatrix samples) {
     unsigned int dim = samples.extent(0);
@@ -21,10 +22,14 @@ void TestStandardNormalSamples(StridedMatrix samples)
     // Check that the mean is zero and the covariance is the identity matrix
     for(int i = 0; i < dim; i++) {
-        REQUIRE(mean(i) == Approx(0.0).margin(mc_margin));
+        CHECK_THAT(mean(i), WithinAbs(0.0, mc_margin));
         for(int j = 0; j < dim; j++) {
             double diag = (double) (i == j);
-            REQUIRE(covar(i, j) - mean(i)*mean(j) == Approx(diag).margin(mc_margin));
+            double cov_ij = covar(i, j) - mean(i)*mean(j);
+            if(i == j)
+                CHECK_THAT(cov_ij, WithinRel(1., mc_margin));
+            else
+                CHECK_THAT(cov_ij, WithinAbs(0., mc_margin));
         }
     }
 
@@ -49,8 +54,8 @@ void TestStandardNormalSamples(StridedMatrix samples)
     double emp_two_std = 0.954499736104;
     double emp_three_std = 0.997300203937;
     for(int i = 0; i < dim; i++) {
-        REQUIRE(in_one_std[i]/(double)N_samp == Approx(emp_one_std).margin(mc_margin));
-        REQUIRE(in_two_std[i]/(double)N_samp == Approx(emp_two_std).margin(mc_margin));
-        REQUIRE(in_three_std[i]/(double)N_samp == Approx(emp_three_std).margin(mc_margin));
+        CHECK_THAT(in_one_std[i]/(double)N_samp, WithinRel(emp_one_std, mc_margin));
+        CHECK_THAT(in_two_std[i]/(double)N_samp, WithinRel(emp_two_std, mc_margin));
+        CHECK_THAT(in_three_std[i]/(double)N_samp, WithinRel(emp_three_std, mc_margin));
     }
 }
\ No newline at end of file
diff --git a/tests/Test_MapObjective.cpp b/tests/Test_MapObjective.cpp
index a89f61ac..7cb982ee 100644
--- a/tests/Test_MapObjective.cpp
+++ b/tests/Test_MapObjective.cpp
@@ -10,6 +10,7 @@
 using namespace mpart;
 using namespace Catch;
+using namespace Catch::Matchers;
 
 TEST_CASE( "Test KLMapObjective", "[KLMapObjective]") {
     unsigned int dim = 2;
@@ -39,7 +40,7 @@ TEST_CASE( "Test KLMapObjective", "[KLMapObjective]") {
         double inv_cov_diag = map_scale*map_scale;
         double kl_exact = -std::log(inv_cov_diag) - 1 + inv_cov_diag + map_shift*map_shift*inv_cov_diag;
         kl_exact /= 2.;
-        CHECK(kl_exact == Approx(kl_est).margin(0.5));
+        CHECK_THAT(kl_exact, WithinRel(kl_est, 0.5));
     }
     // Setup map for following sections
@@ -57,7 +58,7 @@ TEST_CASE( "Test KLMapObjective", "[KLMapObjective]") {
             map->Coeffs()(i) += fd_step;
             double kl_perturb_i = objective.ObjectiveImpl(reference_samples, map);
             double coeffFD_i = (kl_perturb_i - kl_est)/fd_step;
-            CHECK(coeffFD_i == Approx(coeffGrad(i)).margin(5*fd_step));
+            CHECK_THAT(coeffFD_i, WithinRel(coeffGrad(i), 10*fd_step));
             map->Coeffs()(i) -= fd_step;
         }
     }
@@ -67,9 +68,9 @@ TEST_CASE( "Test KLMapObjective", "[KLMapObjective]") {
         Kokkos::View coeffGrad ("CoeffGrad of KL Obj", map->numCoeffs);
         objective.CoeffGradImpl(reference_samples, coeffGradRef, map);
         double kl_est = objective.ObjectivePlusCoeffGradImpl(reference_samples, coeffGrad, map);
-        CHECK(kl_est_ref == Approx(kl_est).margin(1e-12));
+        CHECK_THAT(kl_est_ref, WithinRel(kl_est, 1e-12));
         for(int i = 0; i < map->numCoeffs; i++) {
-            CHECK(coeffGradRef(i) == Approx(coeffGrad(i)).margin(1e-12));
+            CHECK_THAT(coeffGradRef(i), WithinRel(coeffGrad(i), 1e-12));
         }
     }
     SECTION("MapObjectiveFunctions") {
@@ -78,19 +79,79 @@ TEST_CASE( "Test KLMapObjective", "[KLMapObjective]") {
         Kokkos::View coeffGrad ("CoeffGrad of KL Obj", map->numCoeffs);
         double kl_est_ref = objective.ObjectivePlusCoeffGradImpl(train_samples, coeffGradRef, map);
         double kl_est = objective(map->numCoeffs, map->Coeffs().data(), coeffGrad.data(), map);
-        CHECK(kl_est_ref == Approx(kl_est).margin(1e-12));
+        CHECK_THAT(kl_est_ref, WithinRel(kl_est, 1e-12));
         for(int i = 0; i < map->numCoeffs; i++) {
-            CHECK(coeffGradRef(i) == Approx(coeffGrad(i)).margin(1e-12));
+            CHECK_THAT(coeffGradRef(i), WithinRel(coeffGrad(i), 1e-12));
         }
         // TestError
         double test_error_ref = objective.ObjectiveImpl(test_samples, map);
         double test_error = objective.TestError(map);
-        CHECK(test_error_ref == Approx(test_error).margin(1e-12));
+        CHECK_THAT(test_error_ref, WithinRel(test_error, 1e-12));
         // TrainCoeffGrad
         StridedVector trainCoeffGrad = objective.TrainCoeffGrad(map);
         for(int i = 0; i < map->numCoeffs; i++){
-            CHECK(coeffGradRef(i) == Approx(trainCoeffGrad(i)).margin(1e-12));
+            CHECK_THAT(coeffGradRef(i), WithinRel(trainCoeffGrad(i), 1e-12));
         }
     }
 }
+TEST_CASE( "Test FastGaussianReverseKLObjective", "[FastGaussianReverseKLObjective]") {
+    unsigned int dim = 1;
+    unsigned int maxOrder = 4;
+    auto map = MapFactory::CreateTriangular(dim, dim, maxOrder);
+
+    unsigned int seed = 42;
+    unsigned int N_samples = 20000;
+    unsigned int N_testpts = N_samples/5;
+
+    std::shared_ptr> density = std::make_shared>(dim);
+    density->SetSeed(seed);
+    StridedMatrix reference_samples = density->Sample(N_samples);
+    StridedMatrix test_samples = Kokkos::subview(reference_samples, Kokkos::ALL, Kokkos::make_pair(0u,N_testpts));
+    StridedMatrix train_samples = Kokkos::subview(reference_samples, Kokkos::ALL, Kokkos::make_pair(N_testpts,N_samples));
+
+    FastGaussianReverseKLObjective objective {train_samples, test_samples, map->numCoeffs};
+    KLObjective klObjective {train_samples, test_samples, density};
+    Kokkos::View coeffGradRef ("Reference CoeffGrad of KL Obj", map->numCoeffs);
+    Kokkos::View coeffGrad ("CoeffGrad of KL Obj", map->numCoeffs);
+    Kokkos::deep_copy(map->Coeffs(), 0.1);
+
+    SECTION("ObjectivePlusCoeffGradImpl"){
+        double kl_err_ref = klObjective.ObjectivePlusCoeffGradImpl(train_samples, coeffGradRef, map);
+        double kl_err = objective.ObjectivePlusCoeffGradImpl(train_samples, coeffGrad, map);
+        for(int i = 0; i < map->numCoeffs; i++) {
+            CHECK_THAT(coeffGradRef(i), WithinRel(coeffGrad(i), 1e-12));
+        }
+        Kokkos::deep_copy(map->Coeffs(), 0.5);
+        double kl_err_ref2 = klObjective.ObjectivePlusCoeffGradImpl(train_samples, coeffGradRef, map);
+        double kl_err2 = objective.ObjectivePlusCoeffGradImpl(train_samples, coeffGrad, map);
+        double slope = (kl_err_ref2 - kl_err_ref) / (kl_err2 - kl_err);
+        CHECK_THAT(slope, WithinRel(1., 1e-12));
+        double shift = kl_err_ref;
+        for(int i = 0; i < map->numCoeffs; i++) {
+            CHECK_THAT(coeffGradRef(i), WithinRel(coeffGrad(i), 1e-12));
+        }
+        Kokkos::deep_copy(map->Coeffs(), -0.1);
+        double kl_err_ref3 = klObjective.ObjectivePlusCoeffGradImpl(train_samples, coeffGradRef, map);
+        double kl_err3 = objective.ObjectivePlusCoeffGradImpl(train_samples, coeffGrad, map);
+        double pred_ref = (kl_err3-kl_err)*slope + shift;
+        CHECK_THAT(kl_err_ref3, WithinRel(pred_ref, 1e-12));
+        for(int i = 0; i < map->numCoeffs; i++) {
+            CHECK_THAT(coeffGradRef(i), WithinRel(coeffGrad(i), 1e-12));
+        }
+    }
+    SECTION("ObjectiveImpl") {
+        double kl_err_cgrad = objective.ObjectivePlusCoeffGradImpl(train_samples, coeffGrad, map);
+        double kl_err = objective.ObjectiveImpl(train_samples, map);
+        CHECK_THAT(kl_err_cgrad, WithinRel(kl_err, 1e-14));
+    }
+    SECTION("CoeffGradImpl") {
+        Kokkos::deep_copy(coeffGrad, 0.);
+        Kokkos::deep_copy(coeffGradRef, 0.);
+        objective.ObjectivePlusCoeffGradImpl(train_samples, coeffGradRef, map);
+        objective.CoeffGradImpl(train_samples, coeffGrad, map);
+        for(int i = 0; i < map->numCoeffs; i++) {
+            CHECK_THAT(coeffGradRef(i), WithinRel(coeffGrad(i), 1e-14));
+        }
+    }
+}
\ No newline at end of file
diff --git a/tests/Test_TrainMap.cpp b/tests/Test_TrainMap.cpp
index 57e9c142..a22b9a30 100644
--- a/tests/Test_TrainMap.cpp
+++ b/tests/Test_TrainMap.cpp
@@ -1,5 +1,6 @@
 #include 
+#include "MParT/MultiIndices/MultiIndexSet.h"
 #include "MParT/MapFactory.h"
 #include "MParT/MapObjective.h"
 #include "MParT/TrainMap.h"
@@ -12,9 +13,16 @@
 using namespace mpart;
 using namespace Catch;
 
-using MemorySpace = Kokkos::HostSpace;
+#if defined(MPART_ENABLE_GPU)
+    using MemorySpace2 = mpart::DeviceSpace;
+#else
+    using MemorySpace2 = std::false_type;
+#endif
 
-TEST_CASE("Test_TrainMap", "[TrainMap]") {
+TEMPLATE_TEST_CASE("Test_TrainMap", "[TrainMap]", Kokkos::HostSpace, MemorySpace2) {
+// Skip the test if the MemorySpace is false_type, i.e., no device space is available
+if constexpr (!std::is_same_v) {
+    using MemorySpace = TestType;
     unsigned int seed = 42;
     unsigned int dim = 2;
     unsigned int numPts = 5000;
@@ -22,7 +30,7 @@ TEST_CASE("Test_TrainMap", "[TrainMap]") {
     auto sampler = std::make_shared>(3);
     sampler->SetSeed(seed);
     auto samples = sampler->Sample(numPts);
-    Kokkos::View targetSamples("targetSamples", 3, numPts);
+    Kokkos::View targetSamples("targetSamples", 3, numPts);
     Kokkos::RangePolicy::Space> policy {0u, numPts};
     Kokkos::parallel_for("Banana", policy, KOKKOS_LAMBDA(const unsigned int i) {
         targetSamples(0,i) = samples(0,i);
@@ -31,45 +39,68 @@ TEST_CASE("Test_TrainMap", "[TrainMap]") {
     });
     unsigned int map_order = 2;
     SECTION("SquareMap") {
-        StridedMatrix testSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::make_pair(0u, testPts));
-        StridedMatrix trainSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::make_pair(testPts, numPts));
+        StridedMatrix testSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::make_pair(0u, testPts));
+        StridedMatrix trainSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::make_pair(testPts, numPts));
         auto obj = ObjectiveFactory::CreateGaussianKLObjective(trainSamps, testSamps);
         MapOptions map_options;
-        auto map = MapFactory::CreateTriangular(dim, dim, map_order, map_options);
+        auto map = MapFactory::CreateTriangular(dim, dim, map_order, map_options);
         TrainOptions train_options;
         train_options.verbose = 0;
         TrainMap(map, obj, train_options);
-        auto pullback_samples = map->Evaluate(testSamps);
-        TestStandardNormalSamples(pullback_samples);
+        StridedMatrix pullback_samples = map->Evaluate(testSamps);
+        StridedMatrix pullback_samples_h = Kokkos::create_mirror_view_and_copy(Kokkos::HostSpace(), pullback_samples);
+        TestStandardNormalSamples(pullback_samples_h);
     }
     SECTION("ComponentMap") {
-        StridedMatrix testSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::pair(0, testPts));
-        StridedMatrix trainSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::pair(testPts, numPts));
+        StridedMatrix testSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::pair(0, testPts));
+        StridedMatrix trainSamps = Kokkos::subview(targetSamples, Kokkos::make_pair(1u,3u), Kokkos::pair(testPts, numPts));
         auto obj = ObjectiveFactory::CreateGaussianKLObjective(trainSamps, testSamps, 1);
         MapOptions map_options;
-        std::shared_ptr> map = MapFactory::CreateComponent(FixedMultiIndexSet(dim,map_order), map_options);
+        MultiIndexSet mset = MultiIndexSet::CreateTotalOrder(dim, map_order);
+        FixedMultiIndexSet fmset = mset.Fix().ToDevice();
+        std::shared_ptr> map = MapFactory::CreateComponent(fmset, map_options);
         TrainOptions train_options;
         train_options.verbose = 0;
         TrainMap(map, obj, train_options);
-        auto pullback_samples = map->Evaluate(testSamps);
-        TestStandardNormalSamples(pullback_samples);
+        StridedMatrix pullback_samples = map->Evaluate(testSamps);
+        StridedMatrix pullback_samples_h = Kokkos::create_mirror_view_and_copy(Kokkos::HostSpace(), pullback_samples);
+        TestStandardNormalSamples(pullback_samples_h);
     }
+    StridedMatrix testSamps = Kokkos::subview(targetSamples, Kokkos::ALL(), Kokkos::pair(0, testPts));
+    StridedMatrix trainSamps = Kokkos::subview(targetSamples, Kokkos::ALL(), Kokkos::pair(testPts, numPts));
     SECTION("RectangleMap") {
-        StridedMatrix testSamps = Kokkos::subview(targetSamples, Kokkos::ALL(), Kokkos::pair(0, testPts));
-        StridedMatrix trainSamps = Kokkos::subview(targetSamples, Kokkos::ALL(), Kokkos::pair(testPts, numPts));
         auto obj = ObjectiveFactory::CreateGaussianKLObjective(trainSamps, testSamps, 2);
         MapOptions map_options;
-        std::shared_ptr> map = MapFactory::CreateTriangular(dim+1, dim, map_order, map_options);
+        std::shared_ptr> map = MapFactory::CreateTriangular(dim+1, dim, map_order, map_options);
         TrainOptions train_options;
         train_options.verbose = 0;
         TrainMap(map, obj, train_options);
-        auto pullback_samples = map->Evaluate(testSamps);
-        TestStandardNormalSamples(pullback_samples);
+        StridedMatrix pullback_samples = map->Evaluate(testSamps);
+        StridedMatrix pullback_samples_h = Kokkos::create_mirror_view_and_copy(Kokkos::HostSpace(), pullback_samples);
+        TestStandardNormalSamples(pullback_samples_h);
     }
+    SECTION("RectangleMap_reference") {
+        Kokkos::View covar("reference", dim, dim);
+        Kokkos::MDRangePolicy::Space, Kokkos::Rank<2>> policy({0,0}, {dim,dim});
+        Kokkos::parallel_for("Covar", policy, KOKKOS_LAMBDA(const unsigned int i, const unsigned int j) {
+            covar(i,j) = double(i == j);
+        });
+        std::shared_ptr> dens = std::make_shared>(covar); // Ensures that the KLObjective isn't shortcut-ed
+        std::shared_ptr> obj = std::make_shared>(trainSamps, testSamps, dens);
+        MapOptions map_options;
+        std::shared_ptr> map = MapFactory::CreateTriangular(dim+1, dim, map_order, map_options);
+        TrainOptions train_options;
+        train_options.verbose = 0;
+        TrainMap(map, obj, train_options);
+        StridedMatrix pullback_samples = map->Evaluate(testSamps);
+        StridedMatrix pullback_samples_h = Kokkos::create_mirror_view_and_copy(Kokkos::HostSpace(), pullback_samples);
+        TestStandardNormalSamples(pullback_samples_h);
+    }
+} // End possible skip of GPU test
 }
\ No newline at end of file
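Illustrative usage sketch (not part of the patch): how the fast training path added above would typically be exercised, using only the calls that appear in tests/Test_TrainMap.cpp and tests/Test_MapObjective.cpp (CreateGaussianKLObjective, MapFactory::CreateTriangular, TrainMap). The wrapper name TrainBananaMap and the parameter choices are hypothetical, the host memory space is assumed, and Kokkos is assumed to be initialized by the caller.

#include <memory>
#include <Kokkos_Core.hpp>
#include "MParT/MapFactory.h"
#include "MParT/MapObjective.h"
#include "MParT/TrainMap.h"

using namespace mpart;

// Hypothetical wrapper: train a square triangular map on precomputed target samples.
// `trainSamps` is a dim x N matrix of samples from the target distribution.
double TrainBananaMap(StridedMatrix<const double, Kokkos::HostSpace> trainSamps,
                      unsigned int dim, unsigned int order)
{
    // Reverse-KL objective with a standard normal reference density. Combined with the
    // triangular map below, TrainMap can now detect this case and train component-by-component
    // through FastGaussianReverseKLObjective instead of the generic PullbackDensity-based KL.
    auto objective = ObjectiveFactory::CreateGaussianKLObjective(trainSamps, dim);

    // Square (dim -> dim) total-order triangular map.
    auto map = MapFactory::CreateTriangular<Kokkos::HostSpace>(dim, dim, order);

    TrainOptions options;
    options.verbose = 1;  // prints the per-component progress added in src/TrainMap.cpp

    // Returns the final objective value reported by the optimizer.
    return TrainMap(map, objective, options);
}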