Question

I ran into a compiler-dependent issue with the following code (stored in crtp.cc):

#include <vector>
#include <cassert>
#include <iostream>

template < class Derived >
class AlgebraicVectorExpression {
public:
  typedef std::vector<double>::size_type  SizeType;
  typedef std::vector<double>::value_type ValueType;
  typedef std::vector<double>::reference  ReferenceType;

  SizeType size() const {
    return static_cast<const Derived&>(*this).size();
  }

  ValueType operator[](SizeType ii) const {
    return static_cast<const Derived&>(*this)[ii];
  }

  operator Derived&() {
    return static_cast<Derived&>(*this);
  }

  operator const Derived&() const {
    return static_cast< const Derived& >(*this);
  }
};

template< class T1, class T2>
class AlgebraicVectorSum : public AlgebraicVectorExpression< AlgebraicVectorSum<T1,T2> > {
  const T1 & a_;
  const T2 & b_;

  typedef typename AlgebraicVectorExpression< AlgebraicVectorSum<T1,T2> >::SizeType  SizeType;
  typedef typename AlgebraicVectorExpression< AlgebraicVectorSum<T1,T2> >::ValueType ValueType;
public:

  AlgebraicVectorSum(const AlgebraicVectorExpression<T1>& a, const AlgebraicVectorExpression<T2>& b) :
    a_(a), b_(b)  {
    assert(a_.size() == b_.size());
  }

  SizeType size() const {
    return a_.size();
  }

  ValueType operator[](SizeType ii) const {
    return (a_[ii] + b_[ii]);
  }

};

template< class T1, class T2>
const AlgebraicVectorSum<T1,T2>
operator+(const AlgebraicVectorExpression<T1>& a, const AlgebraicVectorExpression<T2>& b) {
  return AlgebraicVectorSum<T1,T2>(a,b);
}

class AlgebraicVector : public AlgebraicVectorExpression<AlgebraicVector>{
  std::vector<double> data_;

public:
  SizeType size() const {
    return data_.size();
  }

  ValueType operator[](SizeType ii) const {
    return data_[ii];
  }

  ValueType& operator[](SizeType ii) {
    return data_[ii];
  }

  AlgebraicVector(SizeType n) : data_(n,0.0) {
  }

  template< class T>
  AlgebraicVector(const AlgebraicVectorExpression<T>& vec) {
    const T& v = vec;
    data_.resize(v.size());
    for( SizeType idx = 0; idx != v.size(); ++idx) {
      data_[idx] = v[idx];
    }
  }
};

int main() {

  AlgebraicVector x(10);
  AlgebraicVector y(10);
  for (int ii = 0; ii != 10; ++ii)
    x[ii] = y[ii] = ii;    

  AlgebraicVector z(10);
  z = x + y;

  for(int ii = 0; ii != 10; ++ii)
    std::cout << z[ii] << std::endl;
  return 0;
}

In fact, when I compile it with:

$ g++ --version
g++ (Ubuntu 4.4.3-4ubuntu5) 4.4.3
Copyright (C) 2009 Free Software Foundation, Inc.
This is free software; see the source for copying conditions.  There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

$ g++ -O0 -g crtp.cc

I obtain:

$ ./a.out 
0
2
4
6
8
10
12
14
16
18

which is the expected behavior. When I use icpc:

$ icpc --version
icpc (ICC) 12.1.0 20110811
Copyright (C) 1985-2011 Intel Corporation.  All rights reserved.    
$ icpc -g -O0 crtp.cc

Instead, I obtain a segmentation fault. Running

valgrind --tool=memcheck ./a.out

points to line 29 of the source:

AlgebraicVectorExpression<AlgebraicVector>::operator AlgebraicVector const&() const (crtp.cc:29)

As I am quite new to C++ and have spent quite some time searching for a bug without any result, I would like to ask someone more experienced whether this issue is due to an error I introduced (as I expect) or to a compiler bug.

Edit: I changed the code above following the answer from Mike Seymour. I no longer get compiler warnings, but the behavior is the same as before (with the same valgrind output). Has anybody tried compiling it with the Intel compiler?

Edit: I also tried compiling the code from the Expression templates page on Wikipedia and obtained exactly the same behavior as with the example above.

Edit: I have investigated the issue further, and it seems that when the code is compiled with Intel icpc the conversion operator

operator const Derived&() const {
  return static_cast< const Derived& >(*this);
}

calls itself recursively. One workaround I found is to replace this operator with a named method:

const Derived& get_ref() const {
  return static_cast< const Derived& >(*this);
}

and to modify the constructors of the various classes accordingly (a sketch of this change is shown below). Can anybody say which of the two behaviors is correct, ideally pointing to the relevant part of the standard?
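
For reference, here is a minimal sketch of how the constructors could be adapted to use get_ref(); only the affected constructors are shown, and the placement of each snippet follows the code above:

// In AlgebraicVectorSum: bind the stored references through get_ref()
// instead of relying on the implicit conversion operator.
AlgebraicVectorSum(const AlgebraicVectorExpression<T1>& a, const AlgebraicVectorExpression<T2>& b) :
  a_(a.get_ref()), b_(b.get_ref()) {
  assert(a_.size() == b_.size());
}

// In AlgebraicVector: fetch the derived reference explicitly as well.
template< class T>
AlgebraicVector(const AlgebraicVectorExpression<T>& vec) {
  const T& v = vec.get_ref();   // was: const T& v = vec;
  data_.resize(v.size());
  for (SizeType idx = 0; idx != v.size(); ++idx)
    data_[idx] = v[idx];
}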


Solution

You should always enable compiler warnings; they can often spot subtle problems. In this case:

g++ -Wall -Wextra test.cpp
test.cpp: In member function ‘const typename AlgebraicVectorExpression<AlgebraicVectorSum<T1, T2> >::ValueType& AlgebraicVectorSum<T1, T2>::operator[](typename AlgebraicVectorExpression<AlgebraicVectorSum<T1, T2> >::SizeType) const [with T1 = AlgebraicVector, T2 = AlgebraicVector]’:
test.cpp:90:   instantiated from ‘AlgebraicVector::AlgebraicVector(const AlgebraicVectorExpression<T1>&) [with T = AlgebraicVectorSum<AlgebraicVector, AlgebraicVector>]’
test.cpp:103:   instantiated from here
test.cpp:52: warning: returning reference to temporary

This tells you the problem:

const ValueType& operator[](SizeType ii) const {
    return (a_[ii] + b_[ii]);
}

The result of the expression is a temporary that is destroyed at the end of that statement, so the function returns a dangling reference to an object that no longer exists. This operator has to return by value instead, and you shouldn't provide a non-const overload, since there is no stored value to modify.
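
Concretely, the fix (which the updated code in the question already applies) is to drop the reference from the return type and return the element by value:

// Return by value: the sum is computed on the fly, so there is no stored
// element to which a reference could safely refer.
ValueType operator[](SizeType ii) const {
  return a_[ii] + b_[ii];
}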

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow