Writing a new Eigen expression


I am trying to write a new Eigen expression following the latest documentation: https://eigen.tuxfamily.org/dox-devel/TopicNewExpressionType.html. Basically, what I want is part of the reshape functionality that is still absent in Eigen: chop_expr<n>(some Eigen vector expression) should reshape the input vector into a matrix with n rows, each column being one consecutive chunk of the vector.
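To make the intended semantics concrete: for a plain vector, chop_expr<n> should behave like a column-major Map over the evaluated data. A minimal usage sketch (assuming a plain VectorXd input rather than an arbitrary expression):

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::VectorXd v(6);
  v << 1, 2, 3, 4, 5, 6;
  // chop_expr<3>(v) should be equivalent to viewing v as a 3 x 2
  // column-major matrix, i.e. each column is one consecutive chunk of v.
  Eigen::Map<Eigen::Matrix<double, 3, Eigen::Dynamic>> chopped(v.data(), 3, v.size() / 3);
  std::cout << chopped << std::endl;  // prints 1 4 / 2 5 / 3 6
}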

Unfortunately, what I implemented doesn't work with expressions whose storage is heap-allocated. For example, the code below doesn't work, but after changing MAXV to 10 everything works perfectly.

Another question is about this line in the traits:

enum {Flags = EvalBeforeNestingBit}

I found that I need it; otherwise, when I chop a matrix multiplication, Eigen doesn't create a temporary for the product. But I guess that this way I force chop_expr to create a temporary for every other expression as well. So the question is: how should I do this properly?
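Here is a sketch of the case I have in mind (it assumes the chop_expr/ChoppedExpression definitions from the listing below, so it is not standalone): without the EvalBeforeNestingBit, the nested product is not evaluated into a temporary, and each coefficient access of the chopped result goes through the product expression again.

void product_case() {
  Eigen::MatrixXd A = Eigen::MatrixXd::Random(4, 4);
  Eigen::VectorXd b = Eigen::VectorXd::Random(4);
  // Chop the 4x1 product A * b into a 2x2 matrix; this is the case where,
  // without EvalBeforeNestingBit in the traits, no temporary is created
  // for the nested product.
  Eigen::MatrixXd m = chop_expr<2>(A * b);
  std::cout << m << std::endl;
}

My full implementation: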

#include <algorithm>
#include <iostream>

#include <Eigen/Dense>

namespace Eigen {

template <int chunk, typename Derived> struct ChoppedExpression;

namespace internal {

template <int chunk, typename Derived>
struct traits<ChoppedExpression<chunk, Derived>> : traits<Derived> {
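  // EvalBeforeNestingBit forces this expression to be evaluated into a
  // temporary before it is nested inside a larger expression -- this is the
  // flag the question above is about.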
  enum {Flags = EvalBeforeNestingBit};
  enum {IsRowMajor = 0};

  enum {   RowsAtCompileTime = chunk};
  enum {MaxRowsAtCompileTime = chunk};

  enum {ColsAtCompileTime = (Derived::RowsAtCompileTime == Eigen::Dynamic 
      ? Eigen::Dynamic : Derived::RowsAtCompileTime / chunk)};

  enum {MaxColsAtCompileTime = (Derived::MaxRowsAtCompileTime == Eigen::Dynamic
      ? Eigen::Dynamic : (Derived::MaxRowsAtCompileTime + chunk - 1) / chunk)};
};

}  // namespace internal

template <int chunk, class Derived> struct ChoppedExpression
  : public MatrixBase<ChoppedExpression<chunk, Derived>> {

  ChoppedExpression(const Derived& arg) : m_arg(arg) {
    EIGEN_STATIC_ASSERT(Derived::ColsAtCompileTime == 1,
        YOU_TRIED_CALLING_A_VECTOR_METHOD_ON_A_MATRIX);

    EIGEN_STATIC_ASSERT(Derived::RowsAtCompileTime % chunk == 0
        || Derived::RowsAtCompileTime == Eigen::Dynamic,
        VECTOR_SHOULD_HAVE_INTEGER_NUMBER_OF_CHUNKS_FOR_CHOPPING);
  }

  typedef Eigen::Index Index;

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index rows() const { return chunk; }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index cols() const { return m_arg.size() / chunk; }

  typedef typename internal::ref_selector<ChoppedExpression>::type Nested;
  typedef typename internal::ref_selector<Derived>::type DerivedTypeNested;

  DerivedTypeNested m_arg;
};

namespace internal {

template<int chunk, typename Derived>
struct evaluator<ChoppedExpression<chunk, Derived>>
  : public evaluator_base<ChoppedExpression<chunk, Derived>> {

  typedef ChoppedExpression<chunk, Derived> XprType;
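  // nested_eval decides whether Derived is nested by reference or evaluated
  // into a temporary; these two typedefs are the ones removed in the Update
  // at the end of the question.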
  typedef typename nested_eval<Derived, XprType::ColsAtCompileTime>::type DerivedNested;
  typedef typename remove_all<DerivedNested>::type DerivedNestedCleaned;
  typedef typename XprType::CoeffReturnType CoeffReturnType;

  enum {
    CoeffReadCost = evaluator<DerivedNestedCleaned>::CoeffReadCost,
    Flags = traits<XprType>::Flags, IsRowMajor = 0
  };

  evaluator(const XprType& xpr) : m_argImpl(xpr.m_arg) {}

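  // Column col of the chopped matrix maps to the chunk
  // [col * chunk, (col + 1) * chunk) of the wrapped vector expression.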
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index row, Index col) const
    { return m_argImpl.coeff(col * chunk + row); }

  evaluator<DerivedNestedCleaned> m_argImpl;
};

}  // namespace internal
}  // namespace Eigen

template<int chunk, typename Derived> EIGEN_ALWAYS_INLINE
EIGEN_DEVICE_FUNC static Eigen::ChoppedExpression<chunk, Derived>
chop_expr(const Eigen::MatrixBase<Derived> &expr)
  { return Eigen::ChoppedExpression<chunk, Derived>(expr.derived()); }


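// With MAXV == -1 the max size is Dynamic, so _blendshapes is heap-allocated
// (the failing case); with MAXV == 10 the max size is fixed at 30, the storage
// lives on the stack, and the program prints the expected result.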
#define MAXV -1

Eigen::Matrix<double, -1, 1, 0, std::max(3*MAXV, -1)> _blendshapes(2, 1);

int main() {
  for (int i = 0; i < 2; ++i) _blendshapes[i] = double(i + 10);

  std::cout << chop_expr<2>(_blendshapes + Eigen::Matrix<double, 2, 1>(1, 1)) << std::endl;
}

Update

Finally, I found a way to make it work. The solution is to remove the DerivedNested and DerivedNestedCleaned typedefs (I obviously don't need them in my expression, since it is just reshaping, but I can't explain why they cause incorrect results). Thus the only question left is what I should do about EvalBeforeNestingBit.
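For reference, after the change the evaluator looks roughly like this (a sketch, assuming evaluator<Derived> simply takes the place of evaluator<DerivedNestedCleaned>; everything else is unchanged from the listing above):

namespace Eigen {
namespace internal {

template<int chunk, typename Derived>
struct evaluator<ChoppedExpression<chunk, Derived>>
  : public evaluator_base<ChoppedExpression<chunk, Derived>> {

  typedef ChoppedExpression<chunk, Derived> XprType;
  typedef typename XprType::CoeffReturnType CoeffReturnType;

  enum {
    CoeffReadCost = evaluator<Derived>::CoeffReadCost,
    Flags = traits<XprType>::Flags, IsRowMajor = 0
  };

  evaluator(const XprType& xpr) : m_argImpl(xpr.m_arg) {}

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index row, Index col) const
    { return m_argImpl.coeff(col * chunk + row); }

  // Evaluator of the wrapped expression itself, without going through nested_eval.
  evaluator<Derived> m_argImpl;
};

}  // namespace internal
}  // namespace Eigen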

Answer by ggael (accepted):

You do not need the EvalBeforeNestingBit, but you must be careful when propagating the flags in the evaluator. To be safe, write:

Flags = traits<XprType>::Flags&HereditaryBits
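
In the evaluator above, that amounts to something like this (a sketch; only the Flags line changes, and the enum {Flags = EvalBeforeNestingBit}; line can then be dropped from the traits):

  enum {
    CoeffReadCost = evaluator<Derived>::CoeffReadCost,
    // Only the hereditary bits are safe to propagate to enclosing expressions.
    Flags = traits<XprType>::Flags & HereditaryBits,
    IsRowMajor = 0
  };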