#include <Eigen/CXX11/Tensor>

template <int DataLayout>
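// Excerpt of test_simple_broadcasting<DataLayout>(): judging by the loop bounds
// below, a 2x3x5x7 tensor is first broadcast by {1,1,1,1} (a no-op, checked via
// `no_broadcast`) and then by {2,3,1,4}, yielding a 4x9x5x28 result.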
  no_broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 2; ++i) {
    for (int j = 0; j < 3; ++j) {
      for (int k = 0; k < 5; ++k) {
        for (int l = 0; l < 7; ++l) {
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 4; ++i) {
    for (int j = 0; j < 9; ++j) {
      for (int k = 0; k < 5; ++k) {
        for (int l = 0; l < 28; ++l) {
template <int DataLayout>
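// Excerpt of test_vectorized_broadcasting<DataLayout>(): the 16x9x20 loop bounds
// below are consistent with an 8x3x5 tensor broadcast by factors {2,3,4}.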
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 16; ++i) {
    for (int j = 0; j < 9; ++j) {
      for (int k = 0; k < 20; ++k) {
#if EIGEN_HAS_VARIADIC_TEMPLATES
  broadcast = tensor.broadcast(broadcasts);
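  // The 22x9x20 loop bounds below suggest the tensor was resized to 11x3x5
  // (in code elided from this excerpt) before this second broadcast by {2,3,4}.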
  for (int i = 0; i < 22; ++i) {
    for (int j = 0; j < 9; ++j) {
      for (int k = 0; k < 20; ++k) {
template <int DataLayout>
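// Excerpt of test_static_broadcasting<DataLayout>(): the same shape checks as the
// vectorized test, but the broadcast factors {2,3,4} are supplied at compile time
// via Eigen::IndexList/type2index when EIGEN_HAS_INDEX_LIST is defined.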
#if defined(EIGEN_HAS_INDEX_LIST)
  Eigen::IndexList<Eigen::type2index<2>, Eigen::type2index<3>, Eigen::type2index<4>> broadcasts;
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 16; ++i) {
    for (int j = 0; j < 9; ++j) {
      for (int k = 0; k < 20; ++k) {
#if EIGEN_HAS_VARIADIC_TEMPLATES
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 22; ++i) {
    for (int j = 0; j < 9; ++j) {
      for (int k = 0; k < 20; ++k) {
template <int DataLayout>
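// Excerpt of test_fixed_size_broadcasting<DataLayout>(): a fixed-size tensor is
// filled with the constant 20.0f; the two 10-iteration loops below presumably
// check the broadcast result elementwise.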
  t2 = t2.constant(20.0f);
  for (int i = 0; i < 10; ++i) {
  for (int i = 0; i < 10; ++i) {
template <int DataLayout>
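// Excerpt of test_simple_broadcasting_one_by_n<DataLayout>(): the 9x13x5x7 loop
// bounds below are consistent with a 1x13x5x7 tensor broadcast by {9,1,1,1},
// i.e. broadcasting along a leading dimension of size one.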
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 9; ++i) {
    for (int j = 0; j < 13; ++j) {
      for (int k = 0; k < 5; ++k) {
        for (int l = 0; l < 7; ++l) {
template <int DataLayout>
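// Excerpt of test_simple_broadcasting_n_by_one<DataLayout>(): the 7x3x5x19 loop
// bounds below are consistent with a 7x3x5x1 tensor broadcast by {1,1,1,19},
// i.e. broadcasting along a trailing dimension of size one.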
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 7; ++i) {
    for (int j = 0; j < 3; ++j) {
      for (int k = 0; k < 5; ++k) {
        for (int l = 0; l < 19; ++l) {
template <int DataLayout>
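// Excerpt of test_simple_broadcasting_one_by_n_by_one_1d<DataLayout>(): the
// 5x7x13 loop bounds below are consistent with a 1x7x1 tensor broadcast by
// {5,1,13}, i.e. size-one dimensions on both sides of the data.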
  broadcasted = tensor.broadcast(broadcasts);
  for (int i = 0; i < 5; ++i) {
    for (int j = 0; j < 7; ++j) {
      for (int k = 0; k < 13; ++k) {
template <int DataLayout>
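// Excerpt of test_simple_broadcasting_one_by_n_by_one_2d<DataLayout>(): the
// 5x7x13x19 loop bounds below are consistent with a 1x7x13x1 tensor broadcast
// by {5,1,1,19}, the 2-D analogue of the previous test.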
  broadcast = tensor.broadcast(broadcasts);
  for (int i = 0; i < 5; ++i) {
    for (int j = 0; j < 7; ++j) {
      for (int k = 0; k < 13; ++k) {
        for (int l = 0; l < 19; ++l) {
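
// A minimal standalone sketch (illustrative, not part of the original test) of
// the Tensor::broadcast() call these tests exercise, assuming only the public
// Eigen tensor API: each broadcast factor multiplies the corresponding
// dimension, and element (i, j) of the result maps back to
// (i % rows, j % cols) of the source.
static void broadcast_usage_sketch() {
  Eigen::Tensor<float, 2> t(2, 3);
  t.setValues({{1.f, 2.f, 3.f}, {4.f, 5.f, 6.f}});

  // Repeat dimension 0 twice and dimension 1 three times: 2x3 -> 4x9.
  Eigen::array<Eigen::Index, 2> factors{{2, 3}};
  Eigen::Tensor<float, 2> b = t.broadcast(factors);

  VERIFY_IS_EQUAL(b.dimension(0), 4);
  VERIFY_IS_EQUAL(b.dimension(1), 9);
  VERIFY_IS_EQUAL(b(3, 8), t(1, 2));  // broadcast element wraps back to the source
}

// Test driver (excerpt of EIGEN_DECLARE_TEST(cxx11_tensor_broadcasting)): each
// broadcasting variant shown above is registered for both ColMajor and RowMajor
// layouts.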
  CALL_SUBTEST(test_simple_broadcasting_one_by_n<RowMajor>());
  CALL_SUBTEST(test_simple_broadcasting_n_by_one<RowMajor>());
  CALL_SUBTEST(test_simple_broadcasting_one_by_n<ColMajor>());
  CALL_SUBTEST(test_simple_broadcasting_n_by_one<ColMajor>());
  CALL_SUBTEST(test_simple_broadcasting_one_by_n_by_one_1d<ColMajor>());
  CALL_SUBTEST(test_simple_broadcasting_one_by_n_by_one_2d<ColMajor>());
  CALL_SUBTEST(test_simple_broadcasting_one_by_n_by_one_1d<RowMajor>());
  CALL_SUBTEST(test_simple_broadcasting_one_by_n_by_one_2d<RowMajor>());