Remove local Index typedef from unit-tests

This commit is contained in:
Gael Guennebaud
2018-07-12 17:16:40 +02:00
parent 63185be8b2
commit 12e1ebb68b
65 changed files with 22 additions and 117 deletions

View File

@@ -19,8 +19,8 @@ template<typename MatrixType> void triangular_square(const MatrixType& m)
 RealScalar largerEps = 10*test_precision<RealScalar>();
-typename MatrixType::Index rows = m.rows();
-typename MatrixType::Index cols = m.cols();
+Index rows = m.rows();
+Index cols = m.cols();
 MatrixType m1 = MatrixType::Random(rows, cols),
 m2 = MatrixType::Random(rows, cols),
@@ -134,7 +134,6 @@ template<typename MatrixType> void triangular_square(const MatrixType& m)
 template<typename MatrixType> void triangular_rect(const MatrixType& m)
 {
-typedef const typename MatrixType::Index Index;
 typedef typename MatrixType::Scalar Scalar;
 typedef typename NumTraits<Scalar>::Real RealScalar;
 enum { Rows = MatrixType::RowsAtCompileTime, Cols = MatrixType::ColsAtCompileTime };