Skip to content

Commit

Permalink
Merge pull request #118 from gergondet/topic/AdaptToEigen33
Browse files Browse the repository at this point in the history
Compilation fixes for Eigen 3.3, 3.2.10 and GCC 5.4, CI fixes with clang (OSX and Linux)
  • Loading branch information
bchretien authored Dec 21, 2016
2 parents 4bc672a + 89d0d5b commit 9f95efd
Show file tree
Hide file tree
Showing 11 changed files with 169 additions and 14 deletions.
2 changes: 1 addition & 1 deletion .travis
7 changes: 6 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ env:
- secure: "F9mibfsT3Ezk0RRsgOHy8RUO82H05REGmJKLDKdHiHE3toxykQX64oNBB8txjHoi+UF0H1Gwmk4oB/pNeILtyHdSuB7hB1Muod9wspOuKrzO7SMTGdjpPw7nDjQEmtfrZFhXEGb/3xm66uSGojhwe0RJF5mOzyfno+G8SIJyos4="
- secure: "Hn0mNkHgP0azydZUqq3CXifrDw+6adcJ541/EInw59BhM9Q7orJEDsfkqSMICLsIQxe+qNb0pYjCze10GZQPqhNBKcsDGmMDOqvwNPyOqEy8TOWl0j6emO3K0QVbfz7l2I9Gvwym/ZVQ+4XH1oZgmfJOPgxLmWZ69wZUDFBlMCg="
- secure: "Atmvwjxe3WqmUPumdDubBTvgC1v5JbIVtgBwuvZOsYdQ36eKOCSBuS0uVS0WbHHmeljeVUzb1EEGitKKjS6hYAkdg3F5eFmHPophL8zUHkY4fGAE80oGyPAFOeqZzIYp5cpStR8540K32C7vkqKqPFkufwLGKI0QZ87HP3U60AM="
- EIGEN_MAJOR=3.2
- APT_DEPENDENCIES="cmake cmake-data doxygen libltdl-dev libboost-all-dev liblog4cxx10-dev"
- HOMEBREW_DEPENDENCIES="doxygen log4cxx"
- GIT_DEPENDENCIES=""
Expand Down Expand Up @@ -37,7 +38,7 @@ after_failure: ./.travis/run after_failure
before_install:
- pip install --user codecov
- ./.travis/run before_install
- ./.travis/dependencies/eigen-3.2
- ./.travis/dependencies/eigen-${EIGEN_MAJOR}

matrix:
allow_failures:
Expand Down Expand Up @@ -66,6 +67,10 @@ matrix:
dist: trusty
compiler: clang
env: ENABLE_CXX11=1
- os: linux
dist: trusty
compiler: gcc
env: EIGEN_MAJOR=3.3
- os: linux
dist: precise
compiler: gcc
Expand Down
10 changes: 10 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,16 @@ IF(HAS_VISIBILITY)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
ENDIF()

# If we were given coverage options, we will only pass them to the core
# library.
SET(GENERATE_COVERAGE FALSE CACHE BOOL "Generate coverage data")
IF("${CMAKE_CXX_FLAGS}" MATCHES "--coverage")
SET(GENERATE_COVERAGE TRUE CACHE BOOL "Generate coverage data" FORCE)
STRING(REPLACE "--coverage" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
STRING(REPLACE "--coverage" "" CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS}")
STRING(REPLACE "--coverage" "" CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS}")
ENDIF()

# Search for dependencies.
SET(BOOST_COMPONENTS date_time filesystem system unit_test_framework)
SEARCH_FOR_BOOST()
Expand Down
32 changes: 32 additions & 0 deletions include/roboptim/core/decorator/finite-difference-gradient.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -470,20 +470,36 @@ namespace roboptim
std::vector<triplet_t> coefficients;

// For each column
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
for (jacobian_t::StorageIndex j = 0; j < this->adaptee_.inputSize (); ++j)
#else
for (jacobian_t::Index j = 0; j < this->adaptee_.inputSize (); ++j)
#endif
{
gradient_t col (this->adaptee_.outputSize ());

computeColumn (epsilon, col, argument, j, xEps);

#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
const matrix_t::StorageIndex j_ = j;
#else
const matrix_t::Index j_ = static_cast<const matrix_t::Index> (j);
#endif
for (gradient_t::InnerIterator it (col); it; ++it)
{
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
const matrix_t::StorageIndex idx = it.index ();
#else
const matrix_t::Index idx =
static_cast<const matrix_t::Index> (it.index ());
#endif

#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
assert (idx < this->adaptee_.outputSize ());
#else
assert (idx < static_cast<const matrix_t::Index>
(this->adaptee_.outputSize ()));
#endif

coefficients.push_back
(triplet_t (idx, j_, it.value ()));
Expand Down Expand Up @@ -743,20 +759,36 @@ namespace roboptim
typedef Eigen::Triplet<double> triplet_t;

std::vector<triplet_t> coefficients;
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
for (jacobian_t::StorageIndex i = 0; i < this->adaptee_.outputSize (); ++i)
#else
for (jacobian_t::Index i = 0; i < this->adaptee_.outputSize (); ++i)
#endif
{
gradient_t grad (this->adaptee_.inputSize ());

computeGradient (epsilon, grad, argument, i, xEps);

#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
const matrix_t::StorageIndex i_ = i;
#else
const matrix_t::Index i_ = static_cast<const matrix_t::Index> (i);
#endif
for (gradient_t::InnerIterator it (grad); it; ++it)
{
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
const matrix_t::StorageIndex idx = it.index ();
#else
const matrix_t::Index idx =
static_cast<const matrix_t::Index> (it.index ());
#endif

#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
assert (idx < this->adaptee_.inputSize ());
#else
assert (idx < static_cast<const matrix_t::Index>
(this->adaptee_.inputSize ()));
#endif

coefficients.push_back
(triplet_t (i_, idx, it.value ()));
Expand Down
9 changes: 7 additions & 2 deletions include/roboptim/core/differentiable-function.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -39,14 +39,19 @@ namespace roboptim
const
{
typedef Eigen::Triplet<value_type> triplet_t;
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
typedef jacobian_t::StorageIndex index_t;
#else
typedef jacobian_t::Index index_t;
#endif
std::vector<triplet_t> coefficients;

for (jacobian_t::Index i = 0; i < this->outputSize (); ++i)
for (index_t i = 0; i < this->outputSize (); ++i)
{
gradient_t grad = gradient (argument, i);
for (gradient_t::InnerIterator it (grad); it; ++it)
{
const jacobian_t::Index
const index_t
idx = static_cast<const jacobian_t::Index> (it.index ());
coefficients.push_back
(triplet_t (i, idx, it.value ()));
Expand Down
13 changes: 13 additions & 0 deletions include/roboptim/core/operator/product.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
# include <boost/utility/enable_if.hpp>
# include <boost/type_traits/is_same.hpp>
# include <boost/mpl/and.hpp>
# include <Eigen/Core>

namespace roboptim
{
Expand Down Expand Up @@ -164,22 +165,34 @@ namespace roboptim

// For each column of the Jacobian
// FIXME: do it Eigen-style
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
for (typename Types<U,V>::jacobian_t::StorageIndex i = 0;
#else
for (typename Types<U,V>::jacobian_t::Index i = 0;
#endif
i < jac_uv.cols (); ++i)
{
// grad_uv = u * grad_v;
for (typename Types<U,V>::jacobianV_t::InnerIterator
it (jac_v, i); it; ++it)
{
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
typename Types<U,V>::gradientV_t::StorageIndex id = it.index ();
#else
typename Types<U,V>::gradientV_t::Index id = it.index ();
#endif
tripletList.push_back (triplet_t (id, i, u (id) * it.value ()));
}

// grad_uv += v * grad_u;
for (typename Types<U,V>::jacobianU_t::InnerIterator
it (jac_u, i); it; ++it)
{
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
typename Types<U,V>::gradientU_t::StorageIndex id = it.index ();
#else
typename Types<U,V>::gradientU_t::Index id = it.index ();
#endif
tripletList.push_back (triplet_t (id, i, v (id) * it.value ()));
}
}
Expand Down
6 changes: 3 additions & 3 deletions include/roboptim/core/problem.hh
Original file line number Diff line number Diff line change
Expand Up @@ -238,12 +238,12 @@ namespace roboptim
/// \brief Retrieve arguments scaling (deprecated version).
///
/// \deprecated Deprecated since version 3.1. Use argumentScaling() instead.
ROBOPTIM_CORE_DEPRECATED scales_t& argumentScales ();
ROBOPTIM_CORE_DEPRECATED scaling_t& argumentScales ();

/// \brief Retrieve arguments scaling (deprecated version).
///
/// \deprecated Deprecated since version 3.1. Use argumentScaling() instead.
ROBOPTIM_CORE_DEPRECATED const scales_t& argumentScales () const;
ROBOPTIM_CORE_DEPRECATED const scaling_t& argumentScales () const;

/// \brief Retrieve arguments names.
/// Arguments names define a name for each argument. This is particularly
Expand Down Expand Up @@ -309,7 +309,7 @@ namespace roboptim
/// \brief Retrieve constraints scaling vector (deprecated version).
///
/// \deprecated Deprecated since version 3.1. Use scalingVector() instead.
ROBOPTIM_CORE_DEPRECATED const scalesVect_t& scalesVector () const;
ROBOPTIM_CORE_DEPRECATED const scalingVect_t& scalesVector () const;

/// \brief Return the output size of the problem's constraints.
size_type constraintsOutputSize () const;
Expand Down
90 changes: 84 additions & 6 deletions include/roboptim/core/problem.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,83 @@
namespace roboptim
{

namespace detail
{
// Internal helpers for scaling rows/columns of a problem jacobian.
// They exist so that Problem<T> can apply constraint and argument
// scaling uniformly for dense and sparse matrix backends: the generic
// template uses Eigen's block API, while specializations iterate the
// sparse nonzeros by hand for Eigen versions where an in-place scalar
// multiply of a sparse block is broken (see #115).
/// \internal
/// \brief Scale a row of an input jacobian.
/// \param j input jacobian.
/// \param ri row index.
/// \param s scaling value.
template <typename T>
inline void scale_jacobian_row(typename roboptim::Problem<T>::jacobian_t & j,
typename roboptim::Problem<T>::size_type ri,
const typename roboptim::Problem<T>::value_type & s)
{
// middleRows(ri, 1) rather than row(ri): presumably chosen because it
// compiles for both dense and sparse jacobian types across the Eigen
// versions supported here — TODO(review) confirm against Eigen 3.3.
j.middleRows(ri, 1) *= s;
}

#if EIGEN_VERSION_AT_LEAST(3, 2, 90) && (! EIGEN_VERSION_AT_LEAST(3, 3, 2))
/// Specialization for sparse type in Eigen >= 3.2.90 and < 3.3.2
/// See #115
// Scales the target row by walking every stored nonzero of the matrix
// and multiplying only the entries whose row index matches. This is
// O(nnz) per call instead of touching a single row, but avoids the
// sparse-block assignment affected by #115 in this Eigen version range.
template<>
inline void scale_jacobian_row<roboptim::EigenMatrixSparse>(
roboptim::Problem<roboptim::EigenMatrixSparse>::jacobian_t & j,
roboptim::Problem<roboptim::EigenMatrixSparse>::size_type ri,
const roboptim::Problem<roboptim::EigenMatrixSparse>::value_type & s)
{
typedef roboptim::Problem<roboptim::EigenMatrixSparse>::size_type size_type;
typedef roboptim::Problem<roboptim::EigenMatrixSparse>::jacobian_t jacobian_t;
// Outer loop over outer dimension (columns for a column-major sparse
// matrix), inner loop over the stored nonzeros of each outer vector.
for (size_type k = 0; k < j.outerSize (); ++k)
{
for (jacobian_t::InnerIterator it(j, k); it; ++it)
{
if (it.row() == ri)
{
// valueRef() mutates the stored coefficient in place;
// no new nonzeros are inserted.
it.valueRef () *= s;
}
}
}
}
#endif

/// \internal
/// \brief Scale a column of an input jacobian.
/// \param j input jacobian.
/// \param ci column index.
/// \param s scaling value.
template <typename T>
inline void scale_jacobian_col(typename roboptim::Problem<T>::jacobian_t & j,
typename roboptim::Problem<T>::size_type ci,
const typename roboptim::Problem<T>::value_type & s)
{
// Column analogue of scale_jacobian_row; middleCols(ci, 1) is the
// single-column block equivalent of col(ci).
j.middleCols(ci, 1) *= s;
}

#if EIGEN_VERSION_AT_LEAST(3, 2, 90) && (! EIGEN_VERSION_AT_LEAST(3, 3, 2))
/// Specialization for sparse type in Eigen >= 3.2.90 and < 3.3.2
/// See #115
// Same workaround as the row specialization: scan all stored nonzeros
// and scale those whose column index matches, in place.
template<>
inline void scale_jacobian_col<roboptim::EigenMatrixSparse>(
roboptim::Problem<roboptim::EigenMatrixSparse>::jacobian_t & j,
roboptim::Problem<roboptim::EigenMatrixSparse>::size_type ci,
const roboptim::Problem<roboptim::EigenMatrixSparse>::value_type & s)
{
typedef roboptim::Problem<roboptim::EigenMatrixSparse>::size_type size_type;
typedef roboptim::Problem<roboptim::EigenMatrixSparse>::jacobian_t jacobian_t;
for (size_type k = 0; k < j.outerSize (); ++k)
{
for (jacobian_t::InnerIterator it(j, k); it; ++it)
{
if (it.col() == ci)
{
it.valueRef () *= s;
}
}
}
}
#endif
}

//
// General template implementation
//
Expand Down Expand Up @@ -308,7 +385,7 @@ namespace roboptim
}

template <typename T>
const typename Problem<T>::scalesVect_t&
const typename Problem<T>::scalingVect_t&
Problem<T>::scalesVector () const
{
return scalingVector ();
Expand Down Expand Up @@ -343,14 +420,14 @@ namespace roboptim
}

template <typename T>
typename Problem<T>::scales_t&
typename Problem<T>::scaling_t&
Problem<T>::argumentScales ()
{
return argumentScaling ();
}

template <typename T>
const typename Problem<T>::scales_t&
const typename Problem<T>::scaling_t&
Problem<T>::argumentScales () const
{
return argumentScaling ();
Expand Down Expand Up @@ -422,8 +499,8 @@ namespace roboptim
df = (*c)->template castInto<differentiableFunction_t> ();
for (size_type i = 0; i < df->outputSize (); ++i)
{
jac.row(global_row + i) *=
scalingVect_[c_idx][static_cast<size_t> (i)];
detail::scale_jacobian_row<T> (jac, global_row + i,
scalingVect_[c_idx][static_cast<size_t> (i)]);
}
global_row += df->outputSize ();
}
Expand All @@ -433,7 +510,8 @@ namespace roboptim
// Apply argument scaling parameters
for (size_t i = 0; i < argumentScaling_.size (); ++i)
{
jac.col (static_cast<size_type> (i)) *= argumentScaling_[i];
detail::scale_jacobian_col<T> (jac, static_cast<size_type> (i),
argumentScaling_[i]);
}

return jac;
Expand Down
2 changes: 1 addition & 1 deletion include/roboptim/core/result-analyzer.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -394,7 +394,7 @@ namespace roboptim
size_type l_idx = ci->local;
size_type as_idx = ci->active;

if (activeJac_.row (as_idx).norm () < eps_)
if (activeJac_.middleRows (as_idx, 1).norm () < eps_)
{
null_grad.null_rows++;
null_grad.constraint_indices[pb_.constraints ()[g_idx]].push_back (l_idx);
Expand Down
8 changes: 8 additions & 0 deletions include/roboptim/core/util.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,10 @@ namespace roboptim
typedef M matrix_t;
typedef B block_t;
typedef typename M::Index index_t;
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
typedef typename Eigen::internal::evaluator<block_t>::InnerIterator EvalIterator;
Eigen::internal::evaluator<block_t> thisEval(b);
#endif

// Make sure that the block fits in the matrix
ROBOPTIM_ASSERT (startRow + b.rows () <= m.rows ());
Expand All @@ -184,7 +188,11 @@ namespace roboptim
{
// Get iterator to first matrix element in the block
typename matrix_t::InnerIterator m_it (m, outer_start + k);
#if EIGEN_VERSION_AT_LEAST(3, 2, 90)
EvalIterator b_it(thisEval, k);
#else
typename block_t::InnerIterator b_it (b, k);
#endif

if (!(b_it))
continue;
Expand Down
4 changes: 4 additions & 0 deletions src/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,10 @@ ENDIF()
# Main library.
ADD_LIBRARY(roboptim-core SHARED ${HEADERS} ${ROBOPTIM_CORE_SRC})
TARGET_COMPILE_OPTIONS(roboptim-core PUBLIC "-Droboptim_core_EXPORTS")
IF(GENERATE_COVERAGE)
TARGET_COMPILE_OPTIONS(roboptim-core PRIVATE "--coverage")
SET_TARGET_PROPERTIES(roboptim-core PROPERTIES LINK_FLAGS "--coverage")
ENDIF()
PKG_CONFIG_USE_DEPENDENCY(roboptim-core liblog4cxx)

# Add required libs to pkg-config file.
Expand Down

0 comments on commit 9f95efd

Please sign in to comment.