From cc1bf0a1205668a099545578a296ea7df25e9930 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 13:35:03 -0700 Subject: [PATCH 01/39] add act backwards test --- tests/cpp/operator/mkldnn.cc | 85 +++++++++++++++++++++++++++++++++++- 1 file changed, 83 insertions(+), 2 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 82fee67b1141..7ba269902991 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -115,13 +115,14 @@ static void InitNegPosArray(NDArray *arr, bool is_rand = false) { if (is_rand) { data[i] = std::rand() - INT_MAX / 2; } else { - size_t shift = size >> 1; + int shift = size >> 1; data[i] = i - shift; } } using InitFunc = std::function; using VerifyFunc = std::function &in_arrs, const NDArray &arr)>; +using VerifyBackwardsFunc = std::function &in_arrs, const std::vector &in_grads)>; // Init arrays with the specified layout. static void InitMKLDNNArray(NDArray *arr, const mkldnn::memory::primitive_desc &pd, @@ -387,6 +388,17 @@ OpAttrs GetReluOp() { return attrs; } +OpAttrs GetReluBackwardsOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("_backward_Activation"); + attrs.attrs.dict.insert({"act_type", "relu"}); + attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.dispatches.resize(2); + attrs.dispatches[0] = DispatchMode::kFCompute; + attrs.dispatches[1] = DispatchMode::kFComputeEx; + return attrs; +} + OpAttrs GetLeakyReluOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("LeakyReLU"); @@ -603,7 +615,7 @@ void VerifyActResult(const std::vector &in_arrs, const NDArray &arr) mshadow::default_real_t *d2 = static_cast(blob2.dptr_); EXPECT_EQ(tmp1.shape().Size(), tmp2.shape().Size()); for (size_t i = 0; i < tmp1.shape().Size(); i++) { - EXPECT_EQ(d1[i], std::fmax(d2[i], 0)); + EXPECT_EQ(std::fmax(d1[i], 0), d2[i]); } } @@ -621,6 +633,22 @@ void VerifySumResult(const std::vector &in_arrs, const NDArray &arr) ASSERT_EQ(d1[i] + d2[i], o[i]); } +void VerifyActBackwardsResult(const NDArray &out_grads, const std::vector &in_arrs, const std::vector &in_grads) { + NDArray tmp1 = out_grads.Reorder2Default(); + NDArray tmp2 = in_arrs[0]->Reorder2Default(); + NDArray tmp3 = in_grads[0]->Reorder2Default(); + TBlob blob1 = tmp1.data(); + TBlob blob2 = tmp2.data(); + TBlob blob3 = tmp3.data(); + mshadow::default_real_t *d1 = static_cast(blob1.dptr_); + mshadow::default_real_t *d2 = static_cast(blob2.dptr_); + mshadow::default_real_t *d3 = static_cast(blob3.dptr_); + EXPECT_EQ(tmp1.shape().Size(), tmp2.shape().Size()); + for (size_t i = 0; i < tmp1.shape().Size(); i++) { + EXPECT_EQ(int(d2[i] > 0) * d1[i], d3[i]); + } +} + void PrintVerifyMsg(const NDArrayAttrs &arr1, const NDArrayAttrs &arr2) { TShape t1 = arr1.arr.shape(); TShape t2 = arr2.arr.shape(); @@ -702,6 +730,54 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { } } +void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyBackwardsFunc verify_fn) { + std::vector inputs(1); + std::vector outputs(1); + std::vector req(1); + std::vector dispatches = attrs.dispatches; + + TestArrayShapes tas = GetTestArrayShapes(); + std::vector pds = tas.pds; + + std::vector in_arrs = GetTestInputArrays(init_fn); + for (auto in_arr : in_arrs) { + for (auto dispatch : dispatches) { + std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); + for (auto out_arr : out_arrs) { + req[0] = kWriteTo; + inputs[0] = &in_arr.arr; // output grads + inputs[0] = &in_arr.arr; // input + outputs[0] = &out_arr.arr; + 
PrintVerifyMsg(in_arr, out_arr); + Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, + outputs, req, dispatch, mxnet::OpStatePtr()); + out_arr.arr.WaitToRead(); + verify_fn(*inputs[0], {inputs[1]}, *outputs[0]); + } + } + } + +// for (auto dispatch : dispatches) { +// in_arrs = GetTestInputArrays(init_fn); +// for (auto arr : in_arrs) { +// // If the array is a view, we shouldn't write data to it. +// if (arr.arr.IsView()) +// continue; +// +// NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); +// req[0] = kWriteInplace; +// inputs[0] = &arr.arr; +// outputs[0] = &arr.arr; +// PrintVerifyMsg(orig, arr); +// Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, +// dispatch, mxnet::OpStatePtr()); +// arr.arr.WaitToRead(); +// inputs[0] = &orig.arr; +// verify_fn(inputs, *outputs[0]); +// } +// } +} + void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector inputs(2); std::vector outputs(1); @@ -762,6 +838,11 @@ TEST(IMPERATIVE, ActOp) { TestUnaryOp(attrs, InitNegPosArray, VerifyActResult); } +TEST(IMPERATIVE, ActBackwardsOp) { + OpAttrs attrs = GetReluBackwardsOp(); + TestUnaryBackwardsOp(attrs, InitNegPosArray, VerifyActBackwardsResult); +} + TEST(IMPERATIVE, BinaryOp) { OpAttrs attrs = GetSumOp(); TestBinaryOp(attrs, VerifySumResult); From 92990a99d98eeb9bb696916e3db8fe04539aaf86 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 14:08:10 -0700 Subject: [PATCH 02/39] use only verifyfn template --- tests/cpp/operator/mkldnn.cc | 38 +++++++++++++++++------------------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 7ba269902991..39d1e47ce595 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -121,8 +121,7 @@ static void InitNegPosArray(NDArray *arr, bool is_rand = false) { } using InitFunc = std::function; -using VerifyFunc = std::function &in_arrs, const NDArray &arr)>; -using VerifyBackwardsFunc = std::function &in_arrs, const std::vector &in_grads)>; +using VerifyFunc = std::function &in_arrs, const std::vector &in_arrs)>; // Init arrays with the specified layout. 
static void InitMKLDNNArray(NDArray *arr, const mkldnn::memory::primitive_desc &pd, @@ -596,9 +595,9 @@ std::vector GetTestOutputArrays(const TShape &shape, return in_arrs; } -void VerifyCopyResult(const std::vector &in_arrs, const NDArray &arr) { +void VerifyCopyResult(const std::vector &in_arrs, const std::vector &out_arrs) { NDArray tmp1 = in_arrs[0]->Reorder2Default(); - NDArray tmp2 = arr.Reorder2Default(); + NDArray tmp2 = out_arrs[0]->Reorder2Default(); EXPECT_EQ(tmp1.shape().Size(), tmp2.shape().Size()); TBlob d1 = tmp1.data(); TBlob d2 = tmp2.data(); @@ -606,9 +605,9 @@ void VerifyCopyResult(const std::vector &in_arrs, const NDArray &arr) tmp1.shape().Size() * sizeof(mshadow::default_real_t)), 0); } -void VerifyActResult(const std::vector &in_arrs, const NDArray &arr) { +void VerifyActResult(const std::vector &in_arrs, const std::vector &out_arrs) { NDArray tmp1 = in_arrs[0]->Reorder2Default(); - NDArray tmp2 = arr.Reorder2Default(); + NDArray tmp2 = out_arrs[0]->Reorder2Default(); TBlob blob1 = tmp1.data(); TBlob blob2 = tmp2.data(); mshadow::default_real_t *d1 = static_cast(blob1.dptr_); @@ -619,10 +618,10 @@ void VerifyActResult(const std::vector &in_arrs, const NDArray &arr) } } -void VerifySumResult(const std::vector &in_arrs, const NDArray &arr) { +void VerifySumResult(const std::vector &in_arrs, const std::vector &out_arrs) { NDArray in1 = in_arrs[0]->Reorder2Default(); NDArray in2 = in_arrs[1]->Reorder2Default(); - NDArray out = arr.Reorder2Default(); + NDArray out = out_arrs[0]->Reorder2Default(); EXPECT_EQ(in1.shape().Size(), in2.shape().Size()); EXPECT_EQ(in1.shape().Size(), out.shape().Size()); @@ -633,10 +632,10 @@ void VerifySumResult(const std::vector &in_arrs, const NDArray &arr) ASSERT_EQ(d1[i] + d2[i], o[i]); } -void VerifyActBackwardsResult(const NDArray &out_grads, const std::vector &in_arrs, const std::vector &in_grads) { - NDArray tmp1 = out_grads.Reorder2Default(); - NDArray tmp2 = in_arrs[0]->Reorder2Default(); - NDArray tmp3 = in_grads[0]->Reorder2Default(); +void VerifyActBackwardsResult(const std::vector &in_arrs, const std::vector &out_arrs) { + NDArray tmp1 = in_arrs[0]->Reorder2Default(); // out grads + NDArray tmp2 = in_arrs[1]->Reorder2Default(); // input + NDArray tmp3 = out_arrs[0]->Reorder2Default(); // input grads TBlob blob1 = tmp1.data(); TBlob blob2 = tmp2.data(); TBlob blob3 = tmp3.data(); @@ -678,7 +677,7 @@ TEST(MKLDNN_NDArray, CopyFrom) { MKLDNNStream::Get()->Submit(); std::vector inputs(1); inputs[0] = &in_arr.arr; - VerifyCopyResult(inputs, out_arr.arr); + VerifyCopyResult(inputs, {&out_arr.arr}); } } } @@ -704,7 +703,7 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); out_arr.arr.WaitToRead(); - verify_fn(inputs, *outputs[0]); + verify_fn(inputs, outputs); } } } @@ -725,12 +724,12 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { dispatch, mxnet::OpStatePtr()); arr.arr.WaitToRead(); inputs[0] = &orig.arr; - verify_fn(inputs, *outputs[0]); + verify_fn(inputs, outputs); } } } -void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyBackwardsFunc verify_fn) { +void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { std::vector inputs(1); std::vector outputs(1); std::vector req(1); @@ -752,11 +751,10 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyBackward 
Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); out_arr.arr.WaitToRead(); - verify_fn(*inputs[0], {inputs[1]}, *outputs[0]); + verify_fn(inputs, outputs); } } } - // for (auto dispatch : dispatches) { // in_arrs = GetTestInputArrays(init_fn); // for (auto arr : in_arrs) { @@ -800,7 +798,7 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); out_arr.arr.WaitToRead(); - verify_fn(inputs, out_arr.arr); + verify_fn(inputs, outputs); } } } @@ -823,7 +821,7 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector orig_inputs(2); orig_inputs[0] = &orig; orig_inputs[1] = &orig; - verify_fn(orig_inputs, arr.arr); + verify_fn(orig_inputs, outputs); } } } From 1e0488b643d766216f0a9b15e3cccdb4bf0ef6b1 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 14:09:26 -0700 Subject: [PATCH 03/39] fix param name --- tests/cpp/operator/mkldnn.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 39d1e47ce595..316768c6a5ff 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -121,7 +121,7 @@ static void InitNegPosArray(NDArray *arr, bool is_rand = false) { } using InitFunc = std::function; -using VerifyFunc = std::function &in_arrs, const std::vector &in_arrs)>; +using VerifyFunc = std::function &in_arrs, const std::vector &out_arrs)>; // Init arrays with the specified layout. static void InitMKLDNNArray(NDArray *arr, const mkldnn::memory::primitive_desc &pd, From 9d7f30bce8f65a385e532c19a8b9b20c8ba9acf8 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 14:19:48 -0700 Subject: [PATCH 04/39] update number of inputs --- tests/cpp/operator/mkldnn.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 316768c6a5ff..8fbba138e980 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -730,7 +730,7 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { } void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { - std::vector inputs(1); + std::vector inputs(2); std::vector outputs(1); std::vector req(1); std::vector dispatches = attrs.dispatches; @@ -744,9 +744,9 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc ver std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); for (auto out_arr : out_arrs) { req[0] = kWriteTo; - inputs[0] = &in_arr.arr; // output grads - inputs[0] = &in_arr.arr; // input - outputs[0] = &out_arr.arr; + inputs[0] = &out_arr.arr; // output grads + inputs[1] = &out_arr.arr; // input + outputs[0] = &in_arr.arr; PrintVerifyMsg(in_arr, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); From 1ea1f110daec2c88fadfae933faa8916d8254097 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 14:37:28 -0700 Subject: [PATCH 05/39] fix assertion for act backwards --- tests/cpp/operator/mkldnn.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 8fbba138e980..9e759b86c62f 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -644,7 +644,7 @@ void 
VerifyActBackwardsResult(const std::vector &in_arrs, const std:: mshadow::default_real_t *d3 = static_cast(blob3.dptr_); EXPECT_EQ(tmp1.shape().Size(), tmp2.shape().Size()); for (size_t i = 0; i < tmp1.shape().Size(); i++) { - EXPECT_EQ(int(d2[i] > 0) * d1[i], d3[i]); + EXPECT_EQ(d2[i] > 0 ? d1[i] : 0, d3[i]); } } From 54766c371b157bb8731f1eb7a07d8a6434203a9c Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 14:45:58 -0700 Subject: [PATCH 06/39] limit rand num range --- tests/cpp/operator/mkldnn.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 9e759b86c62f..2227a2af63a9 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -98,7 +98,7 @@ static void InitDefaultArray(NDArray *arr, bool is_rand = false) { size_t size = blob.Size(); for (size_t i = 0; i < size; i++) { if (is_rand) { - data[i] = std::rand(); + data[i] = std::rand() % 100; } else { data[i] = i; } @@ -113,7 +113,7 @@ static void InitNegPosArray(NDArray *arr, bool is_rand = false) { for (int i = 0; i < size; i++) if (is_rand) { - data[i] = std::rand() - INT_MAX / 2; + data[i] = (std::rand() % 100) - 50; } else { int shift = size >> 1; data[i] = i - shift; From 96f19aa9f86e411a23b2b3d409bbc08a754d03ea Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:17:23 -0700 Subject: [PATCH 07/39] change to assert --- tests/cpp/operator/mkldnn.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 2227a2af63a9..0bf032ada8bb 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -644,7 +644,7 @@ void VerifyActBackwardsResult(const std::vector &in_arrs, const std:: mshadow::default_real_t *d3 = static_cast(blob3.dptr_); EXPECT_EQ(tmp1.shape().Size(), tmp2.shape().Size()); for (size_t i = 0; i < tmp1.shape().Size(); i++) { - EXPECT_EQ(d2[i] > 0 ? d1[i] : 0, d3[i]); + ASSERT_EQ(d2[i] > 0 ? 
d1[i] : 0, d3[i]); } } From 3b1d194cd2302b8bb2967bdaabdf82922823815d Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:22:06 -0700 Subject: [PATCH 08/39] wait to read on correct vector --- tests/cpp/operator/mkldnn.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 0bf032ada8bb..55cca719f81d 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -702,7 +702,7 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { PrintVerifyMsg(in_arr, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); - out_arr.arr.WaitToRead(); + outputs[0]->WaitToRead(); verify_fn(inputs, outputs); } } From ea80b00b6b0922ebeadda28e13c3295b7ffb0718 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:29:51 -0700 Subject: [PATCH 09/39] add writeinplace test --- tests/cpp/operator/mkldnn.cc | 43 ++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 55cca719f81d..11768c6cdb46 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -755,25 +755,25 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc ver } } } -// for (auto dispatch : dispatches) { -// in_arrs = GetTestInputArrays(init_fn); -// for (auto arr : in_arrs) { -// // If the array is a view, we shouldn't write data to it. -// if (arr.arr.IsView()) -// continue; -// -// NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); -// req[0] = kWriteInplace; -// inputs[0] = &arr.arr; -// outputs[0] = &arr.arr; -// PrintVerifyMsg(orig, arr); -// Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, -// dispatch, mxnet::OpStatePtr()); -// arr.arr.WaitToRead(); -// inputs[0] = &orig.arr; -// verify_fn(inputs, *outputs[0]); -// } -// } + for (auto dispatch : dispatches) { + in_arrs = GetTestInputArrays(init_fn); + for (auto arr : in_arrs) { + // If the array is a view, we shouldn't write data to it. 
+ if (arr.arr.IsView()) + continue; + + NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); + req[0] = kWriteInplace; + inputs[0] = &arr.arr; + inputs[1] = &arr.arr; + outputs[0] = &arr.arr; + PrintVerifyMsg(orig, arr); + Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, + dispatch, mxnet::OpStatePtr()); + arr.arr.WaitToRead(); + verify_fn({&orig.arr, &orig.arr}, *outputs[0]); + } + } } void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { @@ -831,6 +831,11 @@ TEST(IMPERATIVE, UnaryOp) { TestUnaryOp(attrs, InitDefaultArray, VerifyCopyResult); } +//TEST(IMPERATIVE, CopyBackwardsOp) { +// OpAttrs attrs = GetCopyBackwardsOp(); +// TestUnaryBackwardsOp(attrs, InitDefaultArray, VerifyCopyBackwardsResult); +//} + TEST(IMPERATIVE, ActOp) { OpAttrs attrs = GetReluOp(); TestUnaryOp(attrs, InitNegPosArray, VerifyActResult); From b72e47501cf0ec46632d94a6c36f12100c5c8a23 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:31:21 -0700 Subject: [PATCH 10/39] fix params --- tests/cpp/operator/mkldnn.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 11768c6cdb46..d166a13dcdfc 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -771,7 +771,7 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc ver Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); arr.arr.WaitToRead(); - verify_fn({&orig.arr, &orig.arr}, *outputs[0]); + verify_fn({&orig.arr, &orig.arr}, outputs); } } } From 83b6f4523cfde114da7edeaf17565c21358cc92a Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:36:24 -0700 Subject: [PATCH 11/39] add copy backwards test --- tests/cpp/operator/mkldnn.cc | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index d166a13dcdfc..c0d8a2163d64 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -632,6 +632,14 @@ void VerifySumResult(const std::vector &in_arrs, const std::vector &in_arrs, const std::vector &out_arrs) { + NDArray tmp1 = out_arrs[0]->Reorder2Default(); + TBlob blob1 = tmp1.data(); + mshadow::default_real_t *d1 = static_cast(blob1.dptr_); + for (size_t i = 0; i < tmp1.shape().Size(); i++) + ASSERT_EQ(1, d1[i]); +} + void VerifyActBackwardsResult(const std::vector &in_arrs, const std::vector &out_arrs) { NDArray tmp1 = in_arrs[0]->Reorder2Default(); // out grads NDArray tmp2 = in_arrs[1]->Reorder2Default(); // input @@ -831,10 +839,10 @@ TEST(IMPERATIVE, UnaryOp) { TestUnaryOp(attrs, InitDefaultArray, VerifyCopyResult); } -//TEST(IMPERATIVE, CopyBackwardsOp) { -// OpAttrs attrs = GetCopyBackwardsOp(); -// TestUnaryBackwardsOp(attrs, InitDefaultArray, VerifyCopyBackwardsResult); -//} +TEST(IMPERATIVE, CopyBackwardsOp) { + OpAttrs attrs = GetCopyBackwardsOp(); + TestUnaryBackwardsOp(attrs, InitDefaultArray, VerifyCopyBackwardsResult); +} TEST(IMPERATIVE, ActOp) { OpAttrs attrs = GetReluOp(); From fe2388126d2a50b52ece1cfdd0102fae8a2ac8da Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:37:48 -0700 Subject: [PATCH 12/39] add missing fixture --- tests/cpp/operator/mkldnn.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index c0d8a2163d64..2382870aaf5a 100644 --- a/tests/cpp/operator/mkldnn.cc +++ 
b/tests/cpp/operator/mkldnn.cc @@ -376,6 +376,15 @@ OpAttrs GetCopyOp() { return attrs; } +OpAttrs GetCopyBackwardsOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("_backward_copy"); + attrs.dispatches.resize(2); + attrs.dispatches[0] = DispatchMode::kFCompute; + attrs.dispatches[1] = DispatchMode::kFComputeEx; + return attrs; +} + OpAttrs GetReluOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("Activation"); From 28a3409a7dca39a4423fcaa3e29a1eba7eb9c5f7 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 15:49:24 -0700 Subject: [PATCH 13/39] fix lint --- tests/cpp/operator/mkldnn.cc | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 2382870aaf5a..05fb98c27506 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -121,7 +121,8 @@ static void InitNegPosArray(NDArray *arr, bool is_rand = false) { } using InitFunc = std::function; -using VerifyFunc = std::function &in_arrs, const std::vector &out_arrs)>; +using VerifyFunc = std::function &in_arrs, + const std::vector &out_arrs)>; // Init arrays with the specified layout. static void InitMKLDNNArray(NDArray *arr, const mkldnn::memory::primitive_desc &pd, @@ -604,7 +605,8 @@ std::vector GetTestOutputArrays(const TShape &shape, return in_arrs; } -void VerifyCopyResult(const std::vector &in_arrs, const std::vector &out_arrs) { +void VerifyCopyResult(const std::vector &in_arrs, + const std::vector &out_arrs) { NDArray tmp1 = in_arrs[0]->Reorder2Default(); NDArray tmp2 = out_arrs[0]->Reorder2Default(); EXPECT_EQ(tmp1.shape().Size(), tmp2.shape().Size()); @@ -614,7 +616,8 @@ void VerifyCopyResult(const std::vector &in_arrs, const std::vector &in_arrs, const std::vector &out_arrs) { +void VerifyActResult(const std::vector &in_arrs, + const std::vector &out_arrs) { NDArray tmp1 = in_arrs[0]->Reorder2Default(); NDArray tmp2 = out_arrs[0]->Reorder2Default(); TBlob blob1 = tmp1.data(); @@ -627,7 +630,8 @@ void VerifyActResult(const std::vector &in_arrs, const std::vector &in_arrs, const std::vector &out_arrs) { +void VerifySumResult(const std::vector &in_arrs, + const std::vector &out_arrs) { NDArray in1 = in_arrs[0]->Reorder2Default(); NDArray in2 = in_arrs[1]->Reorder2Default(); NDArray out = out_arrs[0]->Reorder2Default(); @@ -641,7 +645,8 @@ void VerifySumResult(const std::vector &in_arrs, const std::vector &in_arrs, const std::vector &out_arrs) { +void VerifyCopyBackwardsResult(const std::vector &in_arrs, + const std::vector &out_arrs) { NDArray tmp1 = out_arrs[0]->Reorder2Default(); TBlob blob1 = tmp1.data(); mshadow::default_real_t *d1 = static_cast(blob1.dptr_); @@ -649,10 +654,11 @@ void VerifyCopyBackwardsResult(const std::vector &in_arrs, const std: ASSERT_EQ(1, d1[i]); } -void VerifyActBackwardsResult(const std::vector &in_arrs, const std::vector &out_arrs) { - NDArray tmp1 = in_arrs[0]->Reorder2Default(); // out grads - NDArray tmp2 = in_arrs[1]->Reorder2Default(); // input - NDArray tmp3 = out_arrs[0]->Reorder2Default(); // input grads +void VerifyActBackwardsResult(const std::vector &in_arrs, + const std::vector &out_arrs) { + NDArray tmp1 = in_arrs[0]->Reorder2Default(); // out grads + NDArray tmp2 = in_arrs[1]->Reorder2Default(); // input + NDArray tmp3 = out_arrs[0]->Reorder2Default(); // input grads TBlob blob1 = tmp1.data(); TBlob blob2 = tmp2.data(); TBlob blob3 = tmp3.data(); From 4832ab7667dc9b9cdfc419356778c409e6ec44c1 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 
2018 16:04:25 -0700 Subject: [PATCH 14/39] add sum backwards verify --- tests/cpp/operator/mkldnn.cc | 90 ++++++++++++++++++++++++++++++++++-- 1 file changed, 85 insertions(+), 5 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 05fb98c27506..019c65584166 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -426,6 +426,15 @@ OpAttrs GetSumOp() { return attrs; } +OpAttrs GetSumBackwardsOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("_backwards_elemwise_add"); + attrs.dispatches.resize(2); + attrs.dispatches[0] = DispatchMode::kFCompute; + attrs.dispatches[1] = DispatchMode::kFComputeEx; + return attrs; +} + /* * We want to get a few types of NDArrays for testing: * 1. Normal NDArray @@ -647,11 +656,12 @@ void VerifySumResult(const std::vector &in_arrs, void VerifyCopyBackwardsResult(const std::vector &in_arrs, const std::vector &out_arrs) { - NDArray tmp1 = out_arrs[0]->Reorder2Default(); - TBlob blob1 = tmp1.data(); - mshadow::default_real_t *d1 = static_cast(blob1.dptr_); - for (size_t i = 0; i < tmp1.shape().Size(); i++) - ASSERT_EQ(1, d1[i]); + NDArray output_grads = in_arrs[0]->Reorder2Default(); + NDArray input_grads = out_arrs[0]->Reorder2Default(); + mshadow::default_real_t *og = output_grads.data().dptr(); + mshadow::default_real_t *ig = input_grads.data().dptr(); + for (size_t i = 0; i < output_grads.shape().Size(); i++) + ASSERT_EQ(og[i], ig[i]); } void VerifyActBackwardsResult(const std::vector &in_arrs, @@ -671,6 +681,20 @@ void VerifyActBackwardsResult(const std::vector &in_arrs, } } +void VerifySumBackwardsResult(const std::vector &in_arrs, + const std::vector &out_arrs) { + NDArray out_grads = in_arrs[0]->Reorder2Default(); // out grads + NDArray input_grads1 = out_arrs[0]->Reorder2Default(); // input grads + NDArray input_grads2 = out_arrs[1]->Reorder2Default(); // input grads + mshadow::default_real_t *og = out_grads.data().dptr(); + mshadow::default_real_t *ig1 = input_grads1.data().dptr(); + mshadow::default_real_t *ig2 = input_grads2.data().dptr(); + for (size_t i = 0; i < out_grads.shape().Size(); i++) { + EXPECT_EQ(og[i], ig1[i]); + EXPECT_EQ(og[i], ig2[i]); + } +} + void PrintVerifyMsg(const NDArrayAttrs &arr1, const NDArrayAttrs &arr2) { TShape t1 = arr1.arr.shape(); TShape t2 = arr2.arr.shape(); @@ -849,6 +873,57 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { } } +void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { + std::vector inputs(3); + std::vector outputs(1); + std::vector req(1); + std::vector dispatches = attrs.dispatches; + + TestArrayShapes tas = GetTestArrayShapes(); + std::vector pds = tas.pds; + + std::vector in_arrs = GetTestInputArrays(InitDefaultArray); + for (auto in_arr1 : in_arrs) { + for (auto dispatch : dispatches) { + std::vector out_arrs = GetTestOutputArrays(in_arr1.arr.shape(), pds, + InitDefaultArray); + for (auto out_arr : out_arrs) { + req[0] = kWriteTo; + inputs[0] = &out_arr.arr; + inputs[1] = &out_arr.arr; + inputs[2] = &out_arr.arr; + outputs[0] = &in_arr1.arr; + Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, + outputs, req, dispatch, mxnet::OpStatePtr()); + outputs[0]->WaitToRead(); + verify_fn(inputs, outputs); + } + } + } + + for (auto dispatch : dispatches) { + in_arrs = GetTestInputArrays(InitDefaultArray); + for (auto arr : in_arrs) { + // If the array is a view, we shouldn't write data to it. 
+ if (arr.arr.IsView()) + continue; + + NDArray orig = arr.arr.Copy(arr.arr.ctx()); + req[0] = kWriteInplace; + inputs[0] = &arr.arr; + inputs[1] = &arr.arr; + outputs[0] = &arr.arr; + Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, + dispatch, mxnet::OpStatePtr()); + arr.arr.WaitToRead(); + std::vector orig_inputs(2); + orig_inputs[0] = &orig; + orig_inputs[1] = &orig; + verify_fn({&orig, &orig, &orig}, outputs); + } + } +} + TEST(IMPERATIVE, UnaryOp) { OpAttrs attrs = GetCopyOp(); TestUnaryOp(attrs, InitDefaultArray, VerifyCopyResult); @@ -874,6 +949,11 @@ TEST(IMPERATIVE, BinaryOp) { TestBinaryOp(attrs, VerifySumResult); } +TEST(IMPERATIVE, BinaryBackwardsOp) { + OpAttrs attrs = GetSumBackwardsOp(); + TestBinaryBackwardsOp(attrs, VerifySumBackwardsResult); +} + void VerifySumMemory(mkldnn::memory in_mem1, mkldnn::memory in_mem2, mkldnn::memory out_mem) { float *in1 = static_cast(in_mem1.get_data_handle()); float *in2 = static_cast(in_mem2.get_data_handle()); From 6a61ac2eacb7b2894fdf904b9a3ab39106196daa Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 16:55:05 -0700 Subject: [PATCH 15/39] use correct num of inputs for sum backwards --- tests/cpp/operator/mkldnn.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 019c65584166..9df686502b34 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -428,7 +428,7 @@ OpAttrs GetSumOp() { OpAttrs GetSumBackwardsOp() { OpAttrs attrs; - attrs.attrs.op = Op::Get("_backwards_elemwise_add"); + attrs.attrs.op = Op::Get("_backward_add"); attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; @@ -875,7 +875,7 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector inputs(3); - std::vector outputs(1); + std::vector outputs(2); std::vector req(1); std::vector dispatches = attrs.dispatches; @@ -892,7 +892,10 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { inputs[0] = &out_arr.arr; inputs[1] = &out_arr.arr; inputs[2] = &out_arr.arr; - outputs[0] = &in_arr1.arr; + NDArray tmp1 = in_arr1.arr.Copy(in_arr1.arr.ctx()); + NDArray tmp2 = in_arr1.arr.Copy(in_arr1.arr.ctx()); + outputs[0] = &tmp1; + outputs[1] = &tmp2; Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); outputs[0]->WaitToRead(); From 7d4b9b31708a7604b5c08f7da018470f5f73c829 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 17:01:03 -0700 Subject: [PATCH 16/39] switch input / output --- tests/cpp/operator/mkldnn.cc | 58 ++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 9df686502b34..fa8d97b02ecf 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -791,9 +791,9 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc ver std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); for (auto out_arr : out_arrs) { req[0] = kWriteTo; - inputs[0] = &out_arr.arr; // output grads - inputs[1] = &out_arr.arr; // input - outputs[0] = &in_arr.arr; + inputs[0] = &in_arr.arr; // output grads + inputs[1] = &in_arr.arr; // input + outputs[0] = &out_arr.arr; PrintVerifyMsg(in_arr, out_arr); 
Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); @@ -889,11 +889,11 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { InitDefaultArray); for (auto out_arr : out_arrs) { req[0] = kWriteTo; - inputs[0] = &out_arr.arr; - inputs[1] = &out_arr.arr; - inputs[2] = &out_arr.arr; - NDArray tmp1 = in_arr1.arr.Copy(in_arr1.arr.ctx()); - NDArray tmp2 = in_arr1.arr.Copy(in_arr1.arr.ctx()); + inputs[0] = &in_arr1.arr; + inputs[1] = &in_arr1.arr; + inputs[2] = &in_arr1.arr; + NDArray tmp1 = out_arr.arr.Copy(in_arr1.arr.ctx()); + NDArray tmp2 = out_arr.arr.Copy(in_arr1.arr.ctx()); outputs[0] = &tmp1; outputs[1] = &tmp2; Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, @@ -904,27 +904,27 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { } } - for (auto dispatch : dispatches) { - in_arrs = GetTestInputArrays(InitDefaultArray); - for (auto arr : in_arrs) { - // If the array is a view, we shouldn't write data to it. - if (arr.arr.IsView()) - continue; - - NDArray orig = arr.arr.Copy(arr.arr.ctx()); - req[0] = kWriteInplace; - inputs[0] = &arr.arr; - inputs[1] = &arr.arr; - outputs[0] = &arr.arr; - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, - dispatch, mxnet::OpStatePtr()); - arr.arr.WaitToRead(); - std::vector orig_inputs(2); - orig_inputs[0] = &orig; - orig_inputs[1] = &orig; - verify_fn({&orig, &orig, &orig}, outputs); - } - } +// for (auto dispatch : dispatches) { +// in_arrs = GetTestInputArrays(InitDefaultArray); +// for (auto arr : in_arrs) { +// // If the array is a view, we shouldn't write data to it. +// if (arr.arr.IsView()) +// continue; +// +// NDArray orig = arr.arr.Copy(arr.arr.ctx()); +// req[0] = kWriteInplace; +// inputs[0] = &arr.arr; +// inputs[1] = &arr.arr; +// outputs[0] = &arr.arr; +// Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, +// dispatch, mxnet::OpStatePtr()); +// arr.arr.WaitToRead(); +// std::vector orig_inputs(2); +// orig_inputs[0] = &orig; +// orig_inputs[1] = &orig; +// verify_fn({&orig, &orig, &orig}, outputs); +// } +// } } TEST(IMPERATIVE, UnaryOp) { From 9e71415a7459a0a0108952351a22d2486145ea59 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 11 Jun 2018 17:02:09 -0700 Subject: [PATCH 17/39] wait for both outputs --- tests/cpp/operator/mkldnn.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index fa8d97b02ecf..cdcb007671da 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -899,6 +899,7 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); outputs[0]->WaitToRead(); + outputs[1]->WaitToRead(); verify_fn(inputs, outputs); } } From c3c8a967a0e50f1434fcedf0be24562ccd60bfe3 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 00:12:22 -0700 Subject: [PATCH 18/39] limit input/output --- tests/cpp/operator/mkldnn.cc | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index cdcb007671da..8bd2114d2286 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -366,6 +366,9 @@ struct NDArrayAttrs { struct OpAttrs { nnvm::NodeAttrs attrs; std::vector dispatches; + // for backward operators + int num_inputs; + int num_outputs; }; OpAttrs GetCopyOp() { @@ -380,6 
+383,7 @@ OpAttrs GetCopyOp() { OpAttrs GetCopyBackwardsOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("_backward_copy"); + attrs.num_inputs = 1; attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; @@ -777,8 +781,8 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { } void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { - std::vector inputs(2); - std::vector outputs(1); + std::vector inputs(attrs.num_inputs || 2); + std::vector outputs(attrs.num_outputs || 1); std::vector req(1); std::vector dispatches = attrs.dispatches; @@ -874,8 +878,8 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { } void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { - std::vector inputs(3); - std::vector outputs(2); + std::vector inputs(attrs.num_inputs || 3); + std::vector outputs(attrs.num_inputs || 2); std::vector req(1); std::vector dispatches = attrs.dispatches; From dc17fa265c384415fc4c20f93eb1e10bc7d2008e Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 00:17:20 -0700 Subject: [PATCH 19/39] limit input/outputs for relu/sum --- tests/cpp/operator/mkldnn.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 8bd2114d2286..150ac58f6e3d 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -384,6 +384,7 @@ OpAttrs GetCopyBackwardsOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("_backward_copy"); attrs.num_inputs = 1; + attrs.num_outputs = 1; attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; @@ -406,6 +407,7 @@ OpAttrs GetReluBackwardsOp() { attrs.attrs.op = Op::Get("_backward_Activation"); attrs.attrs.dict.insert({"act_type", "relu"}); attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.num_inputs = 2; attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; @@ -433,6 +435,8 @@ OpAttrs GetSumOp() { OpAttrs GetSumBackwardsOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("_backward_add"); + attrs.num_inputs = 1; + attrs.num_outputs = 2; attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; From da1392834ed8be8b50b238dd057529b5130c4f14 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 00:31:55 -0700 Subject: [PATCH 20/39] fix var source --- tests/cpp/operator/mkldnn.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 150ac58f6e3d..ea9a8bb5eb46 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -785,8 +785,8 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { } void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { - std::vector inputs(attrs.num_inputs || 2); - std::vector outputs(attrs.num_outputs || 1); + std::vector inputs(attrs.num_inputs != 0 ? attrs.num_inputs : 2); + std::vector outputs(attrs.num_outputs != 0 ? 
attrs.num_outputs : 1); std::vector req(1); std::vector dispatches = attrs.dispatches; @@ -882,8 +882,8 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { } void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { - std::vector inputs(attrs.num_inputs || 3); - std::vector outputs(attrs.num_inputs || 2); + std::vector inputs(attrs.num_inputs != 0 ? attrs.num_inputs : 3); + std::vector outputs(attrs.num_outputs != 0 ? attrs.num_outputs : 2); std::vector req(1); std::vector dispatches = attrs.dispatches; From 49d432abe73103bec936ef1e934ebe10db2c94f9 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 14:46:08 -0700 Subject: [PATCH 21/39] reorder backwards if view --- src/operator/nn/mkldnn/mkldnn_act.cc | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/operator/nn/mkldnn/mkldnn_act.cc b/src/operator/nn/mkldnn/mkldnn_act.cc index fae72bd92211..b21d1238f7aa 100644 --- a/src/operator/nn/mkldnn/mkldnn_act.cc +++ b/src/operator/nn/mkldnn/mkldnn_act.cc @@ -184,14 +184,22 @@ void MKLDNNActivationBackward(const nnvm::NodeAttrs& attrs, const OpContext &ctx return; } + NDArray out_buffer = out_grad; + if (out_grad.IsView() && out_grad.IsMKLDNNData()) + out_buffer = out_grad.Reorder2Default(); + + NDArray in_buffer = in_data; + if (in_data.IsView() && in_data.IsMKLDNNData()) + in_buffer = in_data.Reorder2Default(); + const ActivationParam& param = nnvm::get(attrs.parsed); TmpMemMgr::Get()->Init(ctx.requested[activation::kTempSpace]); - auto diff_dst_memory = out_grad.GetMKLDNNData(); - auto input_mem = in_data.GetMKLDNNData(); + auto diff_dst_memory = out_buffer.GetMKLDNNData(); + auto input_mem = in_buffer.GetMKLDNNData(); // We need to make sure the two inputs to eltwise_backward has the same memory // descriptor. Otherwise, the perf will suffer. if (input_mem->get_primitive_desc() != diff_dst_memory->get_primitive_desc()) - input_mem = in_data.GetMKLDNNDataReorder(diff_dst_memory->get_primitive_desc()); + input_mem = in_buffer.GetMKLDNNDataReorder(diff_dst_memory->get_primitive_desc()); mkldnn::memory::primitive_desc data_mpd = input_mem->get_primitive_desc(); mkldnn::memory::desc data_md = data_mpd.desc(); mkldnn::memory::desc diff_md = diff_dst_memory->get_primitive_desc().desc(); @@ -201,7 +209,7 @@ void MKLDNNActivationBackward(const nnvm::NodeAttrs& attrs, const OpContext &ctx auto alg = GetMKLDNNActAlgo(param); mkldnn_output_t diff_src_memory; - MSHADOW_REAL_TYPE_SWITCH(in_data.dtype(), DType, { + MSHADOW_REAL_TYPE_SWITCH(in_buffer.dtype(), DType, { DType alpha = 0; mkldnn::eltwise_forward::desc fw_desc(mkldnn::prop_kind::forward_training, alg, data_md, alpha); From 804c7dec5e8d60802d51c7c8ad9a54d4d75dead5 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 15:19:02 -0700 Subject: [PATCH 22/39] add another entry to reqs in ttest --- tests/cpp/operator/mkldnn.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index ea9a8bb5eb46..6856809c0f0e 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -884,7 +884,7 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector inputs(attrs.num_inputs != 0 ? attrs.num_inputs : 3); std::vector outputs(attrs.num_outputs != 0 ? 
attrs.num_outputs : 2); - std::vector req(1); + std::vector req(2); std::vector dispatches = attrs.dispatches; TestArrayShapes tas = GetTestArrayShapes(); @@ -897,6 +897,7 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { InitDefaultArray); for (auto out_arr : out_arrs) { req[0] = kWriteTo; + req[1] = kWriteTo; inputs[0] = &in_arr1.arr; inputs[1] = &in_arr1.arr; inputs[2] = &in_arr1.arr; From 33f25f9b5fdda99db066ac3abd0414127203ea86 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 15:25:29 -0700 Subject: [PATCH 23/39] uncomment write in place sumbackwards --- tests/cpp/operator/mkldnn.cc | 49 ++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 6856809c0f0e..781de7ef97dd 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -850,6 +850,7 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { inputs[0] = &in_arr1.arr; inputs[1] = &in_arr1.arr; outputs[0] = &out_arr.arr; + PrintVerifyMsg(in_arr1, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); out_arr.arr.WaitToRead(); @@ -865,17 +866,18 @@ void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { if (arr.arr.IsView()) continue; - NDArray orig = arr.arr.Copy(arr.arr.ctx()); + NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); req[0] = kWriteInplace; inputs[0] = &arr.arr; inputs[1] = &arr.arr; outputs[0] = &arr.arr; + PrintVerifyMsg(orig, arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); arr.arr.WaitToRead(); std::vector orig_inputs(2); - orig_inputs[0] = &orig; - orig_inputs[1] = &orig; + orig_inputs[0] = &orig.arr; + orig_inputs[1] = &orig.arr; verify_fn(orig_inputs, outputs); } } @@ -905,6 +907,7 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { NDArray tmp2 = out_arr.arr.Copy(in_arr1.arr.ctx()); outputs[0] = &tmp1; outputs[1] = &tmp2; + PrintVerifyMsg(in_arr1, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); outputs[0]->WaitToRead(); @@ -914,27 +917,25 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { } } -// for (auto dispatch : dispatches) { -// in_arrs = GetTestInputArrays(InitDefaultArray); -// for (auto arr : in_arrs) { -// // If the array is a view, we shouldn't write data to it. -// if (arr.arr.IsView()) -// continue; -// -// NDArray orig = arr.arr.Copy(arr.arr.ctx()); -// req[0] = kWriteInplace; -// inputs[0] = &arr.arr; -// inputs[1] = &arr.arr; -// outputs[0] = &arr.arr; -// Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, -// dispatch, mxnet::OpStatePtr()); -// arr.arr.WaitToRead(); -// std::vector orig_inputs(2); -// orig_inputs[0] = &orig; -// orig_inputs[1] = &orig; -// verify_fn({&orig, &orig, &orig}, outputs); -// } -// } + for (auto dispatch : dispatches) { + in_arrs = GetTestInputArrays(InitDefaultArray); + for (auto arr : in_arrs) { + // If the array is a view, we shouldn't write data to it. 
+ if (arr.arr.IsView()) + continue; + + NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); + req[0] = kWriteInplace; + inputs[0] = &arr.arr; + inputs[1] = &arr.arr; + outputs[0] = &arr.arr; + PrintVerifyMsg(orig, arr); + Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, + dispatch, mxnet::OpStatePtr()); + arr.arr.WaitToRead(); + verify_fn({&orig.arr, &orig.arr}, outputs); + } + } } TEST(IMPERATIVE, UnaryOp) { From 2b44d94db910fa91cfec2782f5c72fb86b982b6a Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 15:39:59 -0700 Subject: [PATCH 24/39] refactor testunary and testbinary into testop --- tests/cpp/operator/mkldnn.cc | 80 ++++++++---------------------------- 1 file changed, 18 insertions(+), 62 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 781de7ef97dd..40968e8e08f3 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -698,8 +698,8 @@ void VerifySumBackwardsResult(const std::vector &in_arrs, mshadow::default_real_t *ig1 = input_grads1.data().dptr(); mshadow::default_real_t *ig2 = input_grads2.data().dptr(); for (size_t i = 0; i < out_grads.shape().Size(); i++) { - EXPECT_EQ(og[i], ig1[i]); - EXPECT_EQ(og[i], ig2[i]); + ASSERT_EQ(og[i], ig1[i]); + ASSERT_EQ(og[i], ig2[i]); } } @@ -737,8 +737,8 @@ TEST(MKLDNN_NDArray, CopyFrom) { } } -void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { - std::vector inputs(1); +void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn, const int num_inputs) { + std::vector inputs(num_inputs); std::vector outputs(1); std::vector req(1); std::vector dispatches = attrs.dispatches; @@ -752,7 +752,8 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); for (auto out_arr : out_arrs) { req[0] = kWriteTo; - inputs[0] = &in_arr.arr; + for (int i = 0; i < num_inputs; i++) + inputs[i] = &in_arr.arr; outputs[0] = &out_arr.arr; PrintVerifyMsg(in_arr, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, @@ -772,14 +773,18 @@ void TestUnaryOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); req[0] = kWriteInplace; - inputs[0] = &arr.arr; + for (int i = 0; i < num_inputs; i++) + inputs[i] = &arr.arr; outputs[0] = &arr.arr; PrintVerifyMsg(orig, arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); arr.arr.WaitToRead(); inputs[0] = &orig.arr; - verify_fn(inputs, outputs); + std::vector orig_inputs(num_inputs); + for (int i = 0; i < num_inputs; i++) + orig_inputs[i] = &orig.arr; + verify_fn(orig_inputs, outputs); } } } @@ -819,9 +824,12 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc ver NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); req[0] = kWriteInplace; + req[1] = kWriteInplace; inputs[0] = &arr.arr; inputs[1] = &arr.arr; + inputs[2] = &arr.arr; outputs[0] = &arr.arr; + outputs[1] = &arr.arr; PrintVerifyMsg(orig, arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); @@ -831,58 +839,6 @@ void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc ver } } -void TestBinaryOp(const OpAttrs &attrs, VerifyFunc verify_fn) { - std::vector inputs(2); - std::vector outputs(1); - std::vector req(1); - std::vector dispatches = 
attrs.dispatches; - - TestArrayShapes tas = GetTestArrayShapes(); - std::vector pds = tas.pds; - - std::vector in_arrs = GetTestInputArrays(InitDefaultArray); - for (auto in_arr1 : in_arrs) { - for (auto dispatch : dispatches) { - std::vector out_arrs = GetTestOutputArrays(in_arr1.arr.shape(), pds, - InitDefaultArray); - for (auto out_arr : out_arrs) { - req[0] = kWriteTo; - inputs[0] = &in_arr1.arr; - inputs[1] = &in_arr1.arr; - outputs[0] = &out_arr.arr; - PrintVerifyMsg(in_arr1, out_arr); - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, - outputs, req, dispatch, mxnet::OpStatePtr()); - out_arr.arr.WaitToRead(); - verify_fn(inputs, outputs); - } - } - } - - for (auto dispatch : dispatches) { - in_arrs = GetTestInputArrays(InitDefaultArray); - for (auto arr : in_arrs) { - // If the array is a view, we shouldn't write data to it. - if (arr.arr.IsView()) - continue; - - NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); - req[0] = kWriteInplace; - inputs[0] = &arr.arr; - inputs[1] = &arr.arr; - outputs[0] = &arr.arr; - PrintVerifyMsg(orig, arr); - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, - dispatch, mxnet::OpStatePtr()); - arr.arr.WaitToRead(); - std::vector orig_inputs(2); - orig_inputs[0] = &orig.arr; - orig_inputs[1] = &orig.arr; - verify_fn(orig_inputs, outputs); - } - } -} - void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector inputs(attrs.num_inputs != 0 ? attrs.num_inputs : 3); std::vector outputs(attrs.num_outputs != 0 ? attrs.num_outputs : 2); @@ -940,7 +896,7 @@ void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { TEST(IMPERATIVE, UnaryOp) { OpAttrs attrs = GetCopyOp(); - TestUnaryOp(attrs, InitDefaultArray, VerifyCopyResult); + TestOp(attrs, InitDefaultArray, VerifyCopyResult, 1); } TEST(IMPERATIVE, CopyBackwardsOp) { @@ -950,7 +906,7 @@ TEST(IMPERATIVE, CopyBackwardsOp) { TEST(IMPERATIVE, ActOp) { OpAttrs attrs = GetReluOp(); - TestUnaryOp(attrs, InitNegPosArray, VerifyActResult); + TestOp(attrs, InitNegPosArray, VerifyActResult, 1); } TEST(IMPERATIVE, ActBackwardsOp) { @@ -960,7 +916,7 @@ TEST(IMPERATIVE, ActBackwardsOp) { TEST(IMPERATIVE, BinaryOp) { OpAttrs attrs = GetSumOp(); - TestBinaryOp(attrs, VerifySumResult); + TestOp(attrs, InitDefaultArray, VerifySumResult, 2); } TEST(IMPERATIVE, BinaryBackwardsOp) { From 4e42d4bab7e7b5696d7647b8360ecd3e962ba6bf Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 15:47:57 -0700 Subject: [PATCH 25/39] remove special testbackwardsop and use testop --- tests/cpp/operator/mkldnn.cc | 147 +++++------------------------------ 1 file changed, 20 insertions(+), 127 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 40968e8e08f3..e0e70cb323c6 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -375,6 +375,8 @@ OpAttrs GetCopyOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("_copy"); attrs.dispatches.resize(2); + attrs.num_inputs = 1; + attrs.num_outputs = 1; attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; return attrs; @@ -396,6 +398,8 @@ OpAttrs GetReluOp() { attrs.attrs.op = Op::Get("Activation"); attrs.attrs.dict.insert({"act_type", "relu"}); attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.num_inputs = 1; + attrs.num_outputs = 1; attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; @@ -408,25 +412,19 @@ OpAttrs 
GetReluBackwardsOp() { attrs.attrs.dict.insert({"act_type", "relu"}); attrs.attrs.op->attr_parser(&attrs.attrs); attrs.num_inputs = 2; + attrs.num_outputs = 1; attrs.dispatches.resize(2); attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; return attrs; } -OpAttrs GetLeakyReluOp() { - OpAttrs attrs; - attrs.attrs.op = Op::Get("LeakyReLU"); - attrs.dispatches.resize(1); - attrs.dispatches[0] = DispatchMode::kFCompute; - return attrs; -} - - OpAttrs GetSumOp() { OpAttrs attrs; attrs.attrs.op = Op::Get("elemwise_add"); attrs.dispatches.resize(2); + attrs.num_inputs = 2; + attrs.num_outputs = 1; attrs.dispatches[0] = DispatchMode::kFCompute; attrs.dispatches[1] = DispatchMode::kFComputeEx; return attrs; @@ -737,9 +735,9 @@ TEST(MKLDNN_NDArray, CopyFrom) { } } -void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn, const int num_inputs) { - std::vector inputs(num_inputs); - std::vector outputs(1); +void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { + std::vector inputs(attrs.num_inputs); + std::vector outputs(attrs.num_outputs); std::vector req(1); std::vector dispatches = attrs.dispatches; @@ -752,7 +750,7 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn, const std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); for (auto out_arr : out_arrs) { req[0] = kWriteTo; - for (int i = 0; i < num_inputs; i++) + for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &in_arr.arr; outputs[0] = &out_arr.arr; PrintVerifyMsg(in_arr, out_arr); @@ -773,7 +771,7 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn, const NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); req[0] = kWriteInplace; - for (int i = 0; i < num_inputs; i++) + for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &arr.arr; outputs[0] = &arr.arr; PrintVerifyMsg(orig, arr); @@ -781,147 +779,42 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn, const dispatch, mxnet::OpStatePtr()); arr.arr.WaitToRead(); inputs[0] = &orig.arr; - std::vector orig_inputs(num_inputs); - for (int i = 0; i < num_inputs; i++) + std::vector orig_inputs(attrs.num_inputs); + for (int i = 0; i < attrs.num_inputs; i++) orig_inputs[i] = &orig.arr; verify_fn(orig_inputs, outputs); } } } -void TestUnaryBackwardsOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { - std::vector inputs(attrs.num_inputs != 0 ? attrs.num_inputs : 2); - std::vector outputs(attrs.num_outputs != 0 ? attrs.num_outputs : 1); - std::vector req(1); - std::vector dispatches = attrs.dispatches; - - TestArrayShapes tas = GetTestArrayShapes(); - std::vector pds = tas.pds; - - std::vector in_arrs = GetTestInputArrays(init_fn); - for (auto in_arr : in_arrs) { - for (auto dispatch : dispatches) { - std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); - for (auto out_arr : out_arrs) { - req[0] = kWriteTo; - inputs[0] = &in_arr.arr; // output grads - inputs[1] = &in_arr.arr; // input - outputs[0] = &out_arr.arr; - PrintVerifyMsg(in_arr, out_arr); - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, - outputs, req, dispatch, mxnet::OpStatePtr()); - out_arr.arr.WaitToRead(); - verify_fn(inputs, outputs); - } - } - } - for (auto dispatch : dispatches) { - in_arrs = GetTestInputArrays(init_fn); - for (auto arr : in_arrs) { - // If the array is a view, we shouldn't write data to it. 
- if (arr.arr.IsView()) - continue; - - NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); - req[0] = kWriteInplace; - req[1] = kWriteInplace; - inputs[0] = &arr.arr; - inputs[1] = &arr.arr; - inputs[2] = &arr.arr; - outputs[0] = &arr.arr; - outputs[1] = &arr.arr; - PrintVerifyMsg(orig, arr); - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, - dispatch, mxnet::OpStatePtr()); - arr.arr.WaitToRead(); - verify_fn({&orig.arr, &orig.arr}, outputs); - } - } -} - -void TestBinaryBackwardsOp(const OpAttrs &attrs, VerifyFunc verify_fn) { - std::vector inputs(attrs.num_inputs != 0 ? attrs.num_inputs : 3); - std::vector outputs(attrs.num_outputs != 0 ? attrs.num_outputs : 2); - std::vector req(2); - std::vector dispatches = attrs.dispatches; - - TestArrayShapes tas = GetTestArrayShapes(); - std::vector pds = tas.pds; - - std::vector in_arrs = GetTestInputArrays(InitDefaultArray); - for (auto in_arr1 : in_arrs) { - for (auto dispatch : dispatches) { - std::vector out_arrs = GetTestOutputArrays(in_arr1.arr.shape(), pds, - InitDefaultArray); - for (auto out_arr : out_arrs) { - req[0] = kWriteTo; - req[1] = kWriteTo; - inputs[0] = &in_arr1.arr; - inputs[1] = &in_arr1.arr; - inputs[2] = &in_arr1.arr; - NDArray tmp1 = out_arr.arr.Copy(in_arr1.arr.ctx()); - NDArray tmp2 = out_arr.arr.Copy(in_arr1.arr.ctx()); - outputs[0] = &tmp1; - outputs[1] = &tmp2; - PrintVerifyMsg(in_arr1, out_arr); - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, - outputs, req, dispatch, mxnet::OpStatePtr()); - outputs[0]->WaitToRead(); - outputs[1]->WaitToRead(); - verify_fn(inputs, outputs); - } - } - } - - for (auto dispatch : dispatches) { - in_arrs = GetTestInputArrays(InitDefaultArray); - for (auto arr : in_arrs) { - // If the array is a view, we shouldn't write data to it. 
- if (arr.arr.IsView()) - continue; - - NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); - req[0] = kWriteInplace; - inputs[0] = &arr.arr; - inputs[1] = &arr.arr; - outputs[0] = &arr.arr; - PrintVerifyMsg(orig, arr); - Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, - dispatch, mxnet::OpStatePtr()); - arr.arr.WaitToRead(); - verify_fn({&orig.arr, &orig.arr}, outputs); - } - } -} - TEST(IMPERATIVE, UnaryOp) { OpAttrs attrs = GetCopyOp(); - TestOp(attrs, InitDefaultArray, VerifyCopyResult, 1); + TestOp(attrs, InitDefaultArray, VerifyCopyResult); } TEST(IMPERATIVE, CopyBackwardsOp) { OpAttrs attrs = GetCopyBackwardsOp(); - TestUnaryBackwardsOp(attrs, InitDefaultArray, VerifyCopyBackwardsResult); + TestOp(attrs, InitDefaultArray, VerifyCopyBackwardsResult); } TEST(IMPERATIVE, ActOp) { OpAttrs attrs = GetReluOp(); - TestOp(attrs, InitNegPosArray, VerifyActResult, 1); + TestOp(attrs, InitNegPosArray, VerifyActResult); } TEST(IMPERATIVE, ActBackwardsOp) { OpAttrs attrs = GetReluBackwardsOp(); - TestUnaryBackwardsOp(attrs, InitNegPosArray, VerifyActBackwardsResult); + TestOp(attrs, InitNegPosArray, VerifyActBackwardsResult); } TEST(IMPERATIVE, BinaryOp) { OpAttrs attrs = GetSumOp(); - TestOp(attrs, InitDefaultArray, VerifySumResult, 2); + TestOp(attrs, InitDefaultArray, VerifySumResult); } TEST(IMPERATIVE, BinaryBackwardsOp) { OpAttrs attrs = GetSumBackwardsOp(); - TestBinaryBackwardsOp(attrs, VerifySumBackwardsResult); + TestOp(attrs, InitDefaultArray, VerifySumBackwardsResult); } void VerifySumMemory(mkldnn::memory in_mem1, mkldnn::memory in_mem2, mkldnn::memory out_mem) { From 77dc89c3ecb0327cb749e994365cf785c76a82b5 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Fri, 15 Jun 2018 15:52:36 -0700 Subject: [PATCH 26/39] fill reqs vector with num of outputs --- tests/cpp/operator/mkldnn.cc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index e0e70cb323c6..37573d06b26f 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -738,7 +738,7 @@ TEST(MKLDNN_NDArray, CopyFrom) { void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { std::vector inputs(attrs.num_inputs); std::vector outputs(attrs.num_outputs); - std::vector req(1); + std::vector req(attrs.num_inputs); std::vector dispatches = attrs.dispatches; TestArrayShapes tas = GetTestArrayShapes(); @@ -749,9 +749,11 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { for (auto dispatch : dispatches) { std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); for (auto out_arr : out_arrs) { - req[0] = kWriteTo; for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &in_arr.arr; + for (int i = 0; i < attrs.num_outputs; i++) + req[i] = kWriteTo; + outputs[0] = &out_arr.arr; PrintVerifyMsg(in_arr, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, @@ -768,11 +770,11 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { // If the array is a view, we shouldn't write data to it. 
if (arr.arr.IsView()) continue; - NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); - req[0] = kWriteInplace; for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &arr.arr; + for (int i = 0; i < attrs.num_outputs; i++) + req[i] = kWriteInplace; outputs[0] = &arr.arr; PrintVerifyMsg(orig, arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, From b2d73f8acd3f6c6c9862c97318c0ff1385ee0492 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 10:03:58 -0700 Subject: [PATCH 27/39] change req size to num outputs --- tests/cpp/operator/mkldnn.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 37573d06b26f..48dc10cced8e 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -738,7 +738,7 @@ TEST(MKLDNN_NDArray, CopyFrom) { void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { std::vector inputs(attrs.num_inputs); std::vector outputs(attrs.num_outputs); - std::vector req(attrs.num_inputs); + std::vector req(attrs.num_outputs); std::vector dispatches = attrs.dispatches; TestArrayShapes tas = GetTestArrayShapes(); From 76db6a69d96c5887b3ae60f336927bcfa1ffc0ce Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 10:15:03 -0700 Subject: [PATCH 28/39] create mulitple output ndarrays --- tests/cpp/operator/mkldnn.cc | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 48dc10cced8e..a7a7d5058321 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -751,10 +751,10 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { for (auto out_arr : out_arrs) { for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &in_arr.arr; - for (int i = 0; i < attrs.num_outputs; i++) + for (int i = 0; i < attrs.num_outputs; i++) { req[i] = kWriteTo; - - outputs[0] = &out_arr.arr; + outputs[i] = &out_arr.arr; + } PrintVerifyMsg(in_arr, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); @@ -773,9 +773,10 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { NDArrayAttrs orig(arr.arr.Copy(arr.arr.ctx()), "InPlace Copy"); for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &arr.arr; - for (int i = 0; i < attrs.num_outputs; i++) + for (int i = 0; i < attrs.num_outputs; i++) { req[i] = kWriteInplace; - outputs[0] = &arr.arr; + outputs[i] = &arr.arr; + } PrintVerifyMsg(orig, arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); From 528e51595ce4581ca49d132e350c2ae1d52dcfc2 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 11:49:13 -0700 Subject: [PATCH 29/39] wait for all outputs --- tests/cpp/operator/mkldnn.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index a7a7d5058321..df7fb960d77b 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -758,7 +758,8 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { PrintVerifyMsg(in_arr, out_arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); - outputs[0]->WaitToRead(); + for (auto output : outputs) + output->WaitToRead(); verify_fn(inputs, outputs); } } @@ -780,8 +781,8 @@ void TestOp(const 
OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { PrintVerifyMsg(orig, arr); Imperative::Get()->InvokeOp(Context(), attrs.attrs, inputs, outputs, req, dispatch, mxnet::OpStatePtr()); - arr.arr.WaitToRead(); - inputs[0] = &orig.arr; + for (auto output : outputs) + output->WaitToRead(); std::vector orig_inputs(attrs.num_inputs); for (int i = 0; i < attrs.num_inputs; i++) orig_inputs[i] = &orig.arr; From 2a54003b67a0905349b72439c3ccc66ffde0469d Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 12:15:08 -0700 Subject: [PATCH 30/39] remove unused comments --- tests/cpp/operator/mkldnn.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index df7fb960d77b..ce8fcb4667b5 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -366,7 +366,6 @@ struct NDArrayAttrs { struct OpAttrs { nnvm::NodeAttrs attrs; std::vector dispatches; - // for backward operators int num_inputs; int num_outputs; }; From b977341cfafeb3351b6304a4c9cc4dfcc60795cb Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 12:51:34 -0700 Subject: [PATCH 31/39] remove redundant VerifyCopyResult method --- tests/cpp/operator/mkldnn.cc | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index ce8fcb4667b5..16ca52f154ce 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -659,16 +659,6 @@ void VerifySumResult(const std::vector &in_arrs, ASSERT_EQ(d1[i] + d2[i], o[i]); } -void VerifyCopyBackwardsResult(const std::vector &in_arrs, - const std::vector &out_arrs) { - NDArray output_grads = in_arrs[0]->Reorder2Default(); - NDArray input_grads = out_arrs[0]->Reorder2Default(); - mshadow::default_real_t *og = output_grads.data().dptr(); - mshadow::default_real_t *ig = input_grads.data().dptr(); - for (size_t i = 0; i < output_grads.shape().Size(); i++) - ASSERT_EQ(og[i], ig[i]); -} - void VerifyActBackwardsResult(const std::vector &in_arrs, const std::vector &out_arrs) { NDArray tmp1 = in_arrs[0]->Reorder2Default(); // out grads @@ -797,7 +787,7 @@ TEST(IMPERATIVE, UnaryOp) { TEST(IMPERATIVE, CopyBackwardsOp) { OpAttrs attrs = GetCopyBackwardsOp(); - TestOp(attrs, InitDefaultArray, VerifyCopyBackwardsResult); + TestOp(attrs, InitDefaultArray, VerifyCopyResult); } TEST(IMPERATIVE, ActOp) { From 5a1d89920a64360861b6e26d3480330b3a3e576a Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 12:54:31 -0700 Subject: [PATCH 32/39] remove redundant VerifySumResult --- tests/cpp/operator/mkldnn.cc | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 16ca52f154ce..84ce25e90ce5 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -810,15 +810,6 @@ TEST(IMPERATIVE, BinaryBackwardsOp) { TestOp(attrs, InitDefaultArray, VerifySumBackwardsResult); } -void VerifySumMemory(mkldnn::memory in_mem1, mkldnn::memory in_mem2, mkldnn::memory out_mem) { - float *in1 = static_cast(in_mem1.get_data_handle()); - float *in2 = static_cast(in_mem2.get_data_handle()); - float *out = static_cast(out_mem.get_data_handle()); - for (size_t i = 0; i < in_mem1.get_primitive_desc().get_size() / sizeof(float); i++) { - ASSERT_EQ(in1[i] + in2[i], out[i]); - } -} - TEST(MKLDNN_BASE, MKLDNNSum) { std::vector in_arrs = GetTestInputArrays(InitDefaultArray); std::vector in_arrs2 = GetTestInputArrays(InitDefaultArray, 
true); @@ -835,7 +826,7 @@ TEST(MKLDNN_BASE, MKLDNNSum) { for (auto out_arr : out_arrs) { auto in_mem1 = in_arr.arr.GetMKLDNNData(); - auto in_mem2 = in_arr.arr.GetMKLDNNData(); + auto in_mem2 = in_arr2.arr.GetMKLDNNData(); auto out_mem = out_arr.arr.GetMKLDNNData(in_mem1->get_primitive_desc()); // TODO(alexzai) : remove this noop when by reordering in MKLDNNSum @@ -844,7 +835,7 @@ TEST(MKLDNN_BASE, MKLDNNSum) { PrintVerifyMsg(in_arr, in_arr); op::MKLDNNSum(*in_mem1, *in_mem2, *out_mem); MKLDNNStream::Get()->Submit(); - VerifySumMemory(*in_mem1, *in_mem2, *out_mem); + VerifySumResult({&in_arr.arr, &in_arr2.arr}, {&out_arr.arr}); } // in place @@ -857,7 +848,7 @@ TEST(MKLDNN_BASE, MKLDNNSum) { auto old_mem = orig_arr.arr.GetMKLDNNData(); op::MKLDNNSum(*input_mem, *input_mem2, *input_mem); MKLDNNStream::Get()->Submit(); - VerifySumMemory(*old_mem, *input_mem2, *input_mem); + VerifySumResult({&orig_arr.arr, &in_arr2.arr}, {&in_arr.arr}); } } From 923d9d1a76495886d807b8249856c646018eddeb Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 12:56:03 -0700 Subject: [PATCH 33/39] remove unused var --- tests/cpp/operator/mkldnn.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 84ce25e90ce5..bcf760fe2a83 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -845,7 +845,6 @@ TEST(MKLDNN_BASE, MKLDNNSum) { PrintVerifyMsg(orig_arr, in_arr); InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc(), InitDefaultArray); orig_arr.arr.CopyFrom(*input_mem); - auto old_mem = orig_arr.arr.GetMKLDNNData(); op::MKLDNNSum(*input_mem, *input_mem2, *input_mem); MKLDNNStream::Get()->Submit(); VerifySumResult({&orig_arr.arr, &in_arr2.arr}, {&in_arr.arr}); From 09164ac991127c401a9669173744d7765d67982a Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 13:03:27 -0700 Subject: [PATCH 34/39] use only InitDefaultArray --- tests/cpp/operator/mkldnn.cc | 82 ++++++++++++++---------------------- 1 file changed, 32 insertions(+), 50 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index bcf760fe2a83..430f6f7412e2 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -93,20 +93,6 @@ TEST(MKLDNN_UTIL_FUNC, MemFormat) { // Init arrays with the default layout. static void InitDefaultArray(NDArray *arr, bool is_rand = false) { - const TBlob &blob = arr->data(); - mshadow::default_real_t *data = blob.dptr(); - size_t size = blob.Size(); - for (size_t i = 0; i < size; i++) { - if (is_rand) { - data[i] = std::rand() % 100; - } else { - data[i] = i; - } - } -} - -// Init arrays with negative and positive values -static void InitNegPosArray(NDArray *arr, bool is_rand = false) { const TBlob &blob = arr->data(); mshadow::default_real_t *data = blob.dptr(); int size = blob.Size(); @@ -120,14 +106,13 @@ static void InitNegPosArray(NDArray *arr, bool is_rand = false) { } } -using InitFunc = std::function; using VerifyFunc = std::function &in_arrs, const std::vector &out_arrs)>; // Init arrays with the specified layout. 
static void InitMKLDNNArray(NDArray *arr, const mkldnn::memory::primitive_desc &pd, - InitFunc init_fn, bool is_rand = false) { - init_fn(arr, is_rand); + bool is_rand = false) { + InitDefaultArray(arr, is_rand); arr->MKLDNNDataReorderAsync(pd); arr->WaitToRead(); } @@ -335,7 +320,7 @@ TEST(MKLDNN_NDArray, GetDataReorder) { for (int i = 0; i < from_pd.desc().data.ndims; i++) printf("%d, ", from_pd.desc().data.dims[i]); printf("), format: %d\n", from_pd.desc().data.format); - InitMKLDNNArray(&arr, from_pd, InitDefaultArray); + InitMKLDNNArray(&arr, from_pd); for (auto to_pd : pds) { if (to_pd.get_size() / sizeof(mshadow::default_real_t) == s.Size()) { const mkldnn::memory *mem = arr.GetMKLDNNDataReorder(to_pd); @@ -461,7 +446,7 @@ OpAttrs GetSumBackwardsOp() { * reordered to 5 dimensions. * */ -std::vector GetTestInputArrays(InitFunc init_fn, bool rand = false) { +std::vector GetTestInputArrays(bool rand = false) { TestArrayShapes tas = GetTestArrayShapes(); std::vector shapes = tas.shapes; std::vector pds = tas.pds; @@ -472,7 +457,7 @@ std::vector GetTestInputArrays(InitFunc init_fn, bool rand = false // Type 1. NDArray arr(shape, Context()); in_arrs.emplace_back(arr, "Normal NDArray"); - init_fn(&in_arrs.back().arr, rand); + InitDefaultArray(&in_arrs.back().arr, rand); for (auto pd : pds) { if (shape.Size() != pd.get_size() / sizeof(mshadow::default_real_t)) continue; @@ -487,7 +472,7 @@ std::vector GetTestInputArrays(InitFunc init_fn, bool rand = false desc = ss.str(); } in_arrs.emplace_back(arr, desc); - InitMKLDNNArray(&in_arrs.back().arr, pd, init_fn); + InitMKLDNNArray(&in_arrs.back().arr, pd); // Type 4, 5, 6. arr = NDArray(shape, Context()); @@ -498,7 +483,7 @@ std::vector GetTestInputArrays(InitFunc init_fn, bool rand = false << shape.ndim() << "/" << pd.desc().data.ndims; desc = ss.str(); } - InitMKLDNNArray(&arr, pd, init_fn); + InitMKLDNNArray(&arr, pd); in_arrs.emplace_back(arr.Slice(1, arr.shape()[0] - 1), desc); } } @@ -506,7 +491,7 @@ std::vector GetTestInputArrays(InitFunc init_fn, bool rand = false } TEST(MKLDNN_NDArray, GetTestInputArrays) { - std::vector in_arrs = GetTestInputArrays(InitDefaultArray); + std::vector in_arrs = GetTestInputArrays(); int mkldnn_count = 0, mkldnn_view_count = 0; for (auto arr : in_arrs) { if (arr.arr.IsView() && arr.arr.IsMKLDNNData()) { @@ -543,20 +528,19 @@ TEST(MKLDNN_NDArray, GetTestInputArrays) { * 9. Reused NDArray with MKLDNN layout of different dimensions. */ std::vector GetTestOutputArrays(const TShape &shape, - const std::vector &pds, - const InitFunc init_fn) { + const std::vector &pds) { std::vector in_arrs; std::string desc; // Type 1. NDArray arr(shape, Context()); in_arrs.emplace_back(arr, "Normal NDArray"); - init_fn(&in_arrs.back().arr, true); + InitDefaultArray(&in_arrs.back().arr, true); // Type 4. TShape tmp_shape = shape; tmp_shape[0] = shape[0] * 2; NDArray arr0(tmp_shape, Context()); - init_fn(&arr0, true); + InitDefaultArray(&arr0, true); in_arrs.emplace_back(arr0.Slice(1, shape[0] + 1), "Reshaped NDArray"); // Type 5. @@ -565,14 +549,14 @@ std::vector GetTestOutputArrays(const TShape &shape, s[0] = shape.Size(); NDArray arr1(s, Context()); arr1 = arr1.AsArray(shape, arr1.dtype()); - init_fn(&arr1, true); + InitDefaultArray(&arr1, true); in_arrs.emplace_back(arr1, "Reused NDArray"); // Type 6. 
s[0] = shape.Size() * GetTypeSize(mshadow::default_type_flag); NDArray arr2(s, Context(), true, mshadow::kUint8); arr2 = arr2.AsArray(shape, mshadow::default_type_flag); - init_fn(&arr2, true); + InitDefaultArray(&arr2, true); in_arrs.emplace_back(arr2, "Reused NDArray with diff data type"); // Type 7 @@ -580,7 +564,7 @@ std::vector GetTestOutputArrays(const TShape &shape, NDArray arr3(s, Context(), true, mshadow::kUint8); tmp_shape[0] = shape[0] * 2; arr3 = arr3.AsArray(tmp_shape, mshadow::default_type_flag); - init_fn(&arr3, true); + InitDefaultArray(&arr3, true); in_arrs.emplace_back(arr3.Slice(1, shape[0] + 1), "Reused+Reshaped NDArray"); @@ -598,7 +582,7 @@ std::vector GetTestOutputArrays(const TShape &shape, desc = ss.str(); } in_arrs.emplace_back(arr, desc); - InitMKLDNNArray(&in_arrs.back().arr, pd, init_fn, true); + InitMKLDNNArray(&in_arrs.back().arr, pd, true); // Type 8, 9. // Get a reused version. @@ -606,7 +590,7 @@ std::vector GetTestOutputArrays(const TShape &shape, s[0] = shape.Size(); NDArray arr = NDArray(s, Context()); arr = arr.AsArray(shape, arr.dtype()); - InitMKLDNNArray(&arr, pd, init_fn, true); + InitMKLDNNArray(&arr, pd, true); desc = "Reused MKLDNN NDArray"; if (shape.ndim() != pd.desc().data.ndims) { std::stringstream ss; @@ -707,10 +691,9 @@ TEST(MKLDNN_NDArray, CopyFrom) { TestArrayShapes tas = GetTestArrayShapes(); std::vector pds = tas.pds; - std::vector in_arrs = GetTestInputArrays(InitDefaultArray); + std::vector in_arrs = GetTestInputArrays(); for (auto in_arr : in_arrs) { - std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, - InitDefaultArray); + std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds); for (auto out_arr : out_arrs) { if (in_arr.arr.IsMKLDNNData() && in_arr.arr.IsView()) in_arr.arr = in_arr.arr.Reorder2Default(); @@ -724,7 +707,7 @@ TEST(MKLDNN_NDArray, CopyFrom) { } } -void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { +void TestOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector inputs(attrs.num_inputs); std::vector outputs(attrs.num_outputs); std::vector req(attrs.num_outputs); @@ -733,10 +716,10 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { TestArrayShapes tas = GetTestArrayShapes(); std::vector pds = tas.pds; - std::vector in_arrs = GetTestInputArrays(init_fn); + std::vector in_arrs = GetTestInputArrays(); for (auto in_arr : in_arrs) { for (auto dispatch : dispatches) { - std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, init_fn); + std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds); for (auto out_arr : out_arrs) { for (int i = 0; i < attrs.num_inputs; i++) inputs[i] = &in_arr.arr; @@ -755,7 +738,7 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { } for (auto dispatch : dispatches) { - in_arrs = GetTestInputArrays(init_fn); + in_arrs = GetTestInputArrays(); for (auto arr : in_arrs) { // If the array is a view, we shouldn't write data to it. 
if (arr.arr.IsView()) @@ -782,45 +765,44 @@ void TestOp(const OpAttrs &attrs, InitFunc init_fn, VerifyFunc verify_fn) { TEST(IMPERATIVE, UnaryOp) { OpAttrs attrs = GetCopyOp(); - TestOp(attrs, InitDefaultArray, VerifyCopyResult); + TestOp(attrs, VerifyCopyResult); } TEST(IMPERATIVE, CopyBackwardsOp) { OpAttrs attrs = GetCopyBackwardsOp(); - TestOp(attrs, InitDefaultArray, VerifyCopyResult); + TestOp(attrs, VerifyCopyResult); } TEST(IMPERATIVE, ActOp) { OpAttrs attrs = GetReluOp(); - TestOp(attrs, InitNegPosArray, VerifyActResult); + TestOp(attrs, VerifyActResult); } TEST(IMPERATIVE, ActBackwardsOp) { OpAttrs attrs = GetReluBackwardsOp(); - TestOp(attrs, InitNegPosArray, VerifyActBackwardsResult); + TestOp(attrs, VerifyActBackwardsResult); } TEST(IMPERATIVE, BinaryOp) { OpAttrs attrs = GetSumOp(); - TestOp(attrs, InitDefaultArray, VerifySumResult); + TestOp(attrs, VerifySumResult); } TEST(IMPERATIVE, BinaryBackwardsOp) { OpAttrs attrs = GetSumBackwardsOp(); - TestOp(attrs, InitDefaultArray, VerifySumBackwardsResult); + TestOp(attrs, VerifySumBackwardsResult); } TEST(MKLDNN_BASE, MKLDNNSum) { - std::vector in_arrs = GetTestInputArrays(InitDefaultArray); - std::vector in_arrs2 = GetTestInputArrays(InitDefaultArray, true); + std::vector in_arrs = GetTestInputArrays(); + std::vector in_arrs2 = GetTestInputArrays(true); TestArrayShapes tas = GetTestArrayShapes(); std::vector pds = tas.pds; for (int i = 0; i < in_arrs.size(); i++) { auto in_arr = in_arrs[i]; auto in_arr2 = in_arrs2[i]; - std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds, - InitDefaultArray); + std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds); if (!SupportMKLDNN(in_arr.arr) || !in_arr.arr.IsMKLDNNData() || in_arr.arr.IsView()) continue; @@ -843,7 +825,7 @@ TEST(MKLDNN_BASE, MKLDNNSum) { auto input_mem2 = in_arr2.arr.GetMKLDNNData(); NDArrayAttrs orig_arr(in_arr.arr.Copy(in_arr.arr.ctx()), "In Place Copy"); PrintVerifyMsg(orig_arr, in_arr); - InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc(), InitDefaultArray); + InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc()); orig_arr.arr.CopyFrom(*input_mem); op::MKLDNNSum(*input_mem, *input_mem2, *input_mem); MKLDNNStream::Get()->Submit(); From 831b7d0bb903548fe6e2c0d366b1a2592e54adbb Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 13:08:18 -0700 Subject: [PATCH 35/39] move MKLDNNSum near copy test --- tests/cpp/operator/mkldnn.cc | 80 ++++++++++++++++++------------------ 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 430f6f7412e2..161f6f0432d9 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -707,6 +707,46 @@ TEST(MKLDNN_NDArray, CopyFrom) { } } +TEST(MKLDNN_BASE, MKLDNNSum) { + std::vector in_arrs = GetTestInputArrays(); + std::vector in_arrs2 = GetTestInputArrays(true); + TestArrayShapes tas = GetTestArrayShapes(); + std::vector pds = tas.pds; + + for (int i = 0; i < in_arrs.size(); i++) { + auto in_arr = in_arrs[i]; + auto in_arr2 = in_arrs2[i]; + std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds); + if (!SupportMKLDNN(in_arr.arr) || !in_arr.arr.IsMKLDNNData() || in_arr.arr.IsView()) + continue; + + for (auto out_arr : out_arrs) { + auto in_mem1 = in_arr.arr.GetMKLDNNData(); + auto in_mem2 = in_arr2.arr.GetMKLDNNData(); + auto out_mem = out_arr.arr.GetMKLDNNData(in_mem1->get_primitive_desc()); + + // TODO(alexzai) : remove this noop when by reordering in MKLDNNSum + if (out_mem 
== nullptr) + continue; + PrintVerifyMsg(in_arr, in_arr); + op::MKLDNNSum(*in_mem1, *in_mem2, *out_mem); + MKLDNNStream::Get()->Submit(); + VerifySumResult({&in_arr.arr, &in_arr2.arr}, {&out_arr.arr}); + } + + // in place + auto input_mem = in_arr.arr.GetMKLDNNData(); + auto input_mem2 = in_arr2.arr.GetMKLDNNData(); + NDArrayAttrs orig_arr(in_arr.arr.Copy(in_arr.arr.ctx()), "In Place Copy"); + PrintVerifyMsg(orig_arr, in_arr); + InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc()); + orig_arr.arr.CopyFrom(*input_mem); + op::MKLDNNSum(*input_mem, *input_mem2, *input_mem); + MKLDNNStream::Get()->Submit(); + VerifySumResult({&orig_arr.arr, &in_arr2.arr}, {&in_arr.arr}); + } +} + void TestOp(const OpAttrs &attrs, VerifyFunc verify_fn) { std::vector inputs(attrs.num_inputs); std::vector outputs(attrs.num_outputs); @@ -793,44 +833,4 @@ TEST(IMPERATIVE, BinaryBackwardsOp) { TestOp(attrs, VerifySumBackwardsResult); } -TEST(MKLDNN_BASE, MKLDNNSum) { - std::vector in_arrs = GetTestInputArrays(); - std::vector in_arrs2 = GetTestInputArrays(true); - TestArrayShapes tas = GetTestArrayShapes(); - std::vector pds = tas.pds; - - for (int i = 0; i < in_arrs.size(); i++) { - auto in_arr = in_arrs[i]; - auto in_arr2 = in_arrs2[i]; - std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds); - if (!SupportMKLDNN(in_arr.arr) || !in_arr.arr.IsMKLDNNData() || in_arr.arr.IsView()) - continue; - - for (auto out_arr : out_arrs) { - auto in_mem1 = in_arr.arr.GetMKLDNNData(); - auto in_mem2 = in_arr2.arr.GetMKLDNNData(); - auto out_mem = out_arr.arr.GetMKLDNNData(in_mem1->get_primitive_desc()); - - // TODO(alexzai) : remove this noop when by reordering in MKLDNNSum - if (out_mem == nullptr) - continue; - PrintVerifyMsg(in_arr, in_arr); - op::MKLDNNSum(*in_mem1, *in_mem2, *out_mem); - MKLDNNStream::Get()->Submit(); - VerifySumResult({&in_arr.arr, &in_arr2.arr}, {&out_arr.arr}); - } - - // in place - auto input_mem = in_arr.arr.GetMKLDNNData(); - auto input_mem2 = in_arr2.arr.GetMKLDNNData(); - NDArrayAttrs orig_arr(in_arr.arr.Copy(in_arr.arr.ctx()), "In Place Copy"); - PrintVerifyMsg(orig_arr, in_arr); - InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc()); - orig_arr.arr.CopyFrom(*input_mem); - op::MKLDNNSum(*input_mem, *input_mem2, *input_mem); - MKLDNNStream::Get()->Submit(); - VerifySumResult({&orig_arr.arr, &in_arr2.arr}, {&in_arr.arr}); - } -} - #endif From dbe80b0df475b827aa146a3e24b42fa6b8830f59 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 14:28:00 -0700 Subject: [PATCH 36/39] use fallback compute for backwards sum --- src/operator/tensor/elemwise_binary_op_basic.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/operator/tensor/elemwise_binary_op_basic.cc b/src/operator/tensor/elemwise_binary_op_basic.cc index 9b5b9d39248b..7d245238b477 100644 --- a/src/operator/tensor/elemwise_binary_op_basic.cc +++ b/src/operator/tensor/elemwise_binary_op_basic.cc @@ -111,6 +111,9 @@ static void _backward_ElemwiseAddEx(const nnvm::NodeAttrs& attrs, MKLDNNCopy(attrs, ctx, inputs[0], req[0], outputs[0]); MKLDNNCopy(attrs, ctx, inputs[0], req[1], outputs[1]); return; + } else if (common::ContainsOnlyStorage(inputs, kDefaultStorage)) { + FallBackCompute(ElemwiseBinaryOp::BackwardUseNone, attrs, ctx, inputs, req, outputs); + return; } #endif ElemwiseBinaryOp::BackwardUseNoneEx( From b009a74619ec82ea39337c06737e0c9c11ab671f Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 15:07:04 -0700 Subject: [PATCH 37/39] fix verifydefmem test --- 
tests/cpp/operator/mkldnn.cc | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 161f6f0432d9..3c26d9a04243 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -123,8 +123,9 @@ static void VerifyDefMem(const mkldnn::memory &mem) { = static_cast(mem.get_data_handle()); size_t size = pd.get_size() / sizeof(mshadow::default_real_t); size_t num_same = 0; - for (size_t i = 0; i < size; i++) - num_same += data[i] == static_cast(i); + int shift = size >> 1; + for (int i = 0; i < size; i++) + num_same += data[i] == static_cast(i - shift); EXPECT_EQ(num_same, size); } @@ -803,7 +804,7 @@ void TestOp(const OpAttrs &attrs, VerifyFunc verify_fn) { } } -TEST(IMPERATIVE, UnaryOp) { +TEST(IMPERATIVE, CopyOp) { OpAttrs attrs = GetCopyOp(); TestOp(attrs, VerifyCopyResult); } @@ -823,12 +824,12 @@ TEST(IMPERATIVE, ActBackwardsOp) { TestOp(attrs, VerifyActBackwardsResult); } -TEST(IMPERATIVE, BinaryOp) { +TEST(IMPERATIVE, SumOp) { OpAttrs attrs = GetSumOp(); TestOp(attrs, VerifySumResult); } -TEST(IMPERATIVE, BinaryBackwardsOp) { +TEST(IMPERATIVE, SumBackwardsOp) { OpAttrs attrs = GetSumBackwardsOp(); TestOp(attrs, VerifySumBackwardsResult); } From bfc729edb79e75aab5934018901152c177906fda Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Mon, 18 Jun 2018 15:57:45 -0700 Subject: [PATCH 38/39] fix lint --- src/operator/tensor/elemwise_binary_op_basic.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/operator/tensor/elemwise_binary_op_basic.cc b/src/operator/tensor/elemwise_binary_op_basic.cc index 7d245238b477..6fc1ebb76cfd 100644 --- a/src/operator/tensor/elemwise_binary_op_basic.cc +++ b/src/operator/tensor/elemwise_binary_op_basic.cc @@ -112,7 +112,9 @@ static void _backward_ElemwiseAddEx(const nnvm::NodeAttrs& attrs, MKLDNNCopy(attrs, ctx, inputs[0], req[1], outputs[1]); return; } else if (common::ContainsOnlyStorage(inputs, kDefaultStorage)) { - FallBackCompute(ElemwiseBinaryOp::BackwardUseNone, attrs, ctx, inputs, req, outputs); + FallBackCompute( + ElemwiseBinaryOp::BackwardUseNone, + attrs, ctx, inputs, req, outputs); return; } #endif From 6085cce41916f97df4d61030b68ec9e28a61b5c0 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Tue, 19 Jun 2018 20:22:24 -0700 Subject: [PATCH 39/39] move MKLDNNSum test back to bottom --- tests/cpp/operator/mkldnn.cc | 80 ++++++++++++++++++------------------ 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/tests/cpp/operator/mkldnn.cc b/tests/cpp/operator/mkldnn.cc index 3c26d9a04243..655435193851 100644 --- a/tests/cpp/operator/mkldnn.cc +++ b/tests/cpp/operator/mkldnn.cc @@ -708,46 +708,6 @@ TEST(MKLDNN_NDArray, CopyFrom) { } } -TEST(MKLDNN_BASE, MKLDNNSum) { - std::vector in_arrs = GetTestInputArrays(); - std::vector in_arrs2 = GetTestInputArrays(true); - TestArrayShapes tas = GetTestArrayShapes(); - std::vector pds = tas.pds; - - for (int i = 0; i < in_arrs.size(); i++) { - auto in_arr = in_arrs[i]; - auto in_arr2 = in_arrs2[i]; - std::vector out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds); - if (!SupportMKLDNN(in_arr.arr) || !in_arr.arr.IsMKLDNNData() || in_arr.arr.IsView()) - continue; - - for (auto out_arr : out_arrs) { - auto in_mem1 = in_arr.arr.GetMKLDNNData(); - auto in_mem2 = in_arr2.arr.GetMKLDNNData(); - auto out_mem = out_arr.arr.GetMKLDNNData(in_mem1->get_primitive_desc()); - - // TODO(alexzai) : remove this noop when by reordering in MKLDNNSum - if (out_mem == nullptr) - continue; - 
PrintVerifyMsg(in_arr, in_arr);
-      op::MKLDNNSum(*in_mem1, *in_mem2, *out_mem);
-      MKLDNNStream::Get()->Submit();
-      VerifySumResult({&in_arr.arr, &in_arr2.arr}, {&out_arr.arr});
-    }
-
-    // in place
-    auto input_mem = in_arr.arr.GetMKLDNNData();
-    auto input_mem2 = in_arr2.arr.GetMKLDNNData();
-    NDArrayAttrs orig_arr(in_arr.arr.Copy(in_arr.arr.ctx()), "In Place Copy");
-    PrintVerifyMsg(orig_arr, in_arr);
-    InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc());
-    orig_arr.arr.CopyFrom(*input_mem);
-    op::MKLDNNSum(*input_mem, *input_mem2, *input_mem);
-    MKLDNNStream::Get()->Submit();
-    VerifySumResult({&orig_arr.arr, &in_arr2.arr}, {&in_arr.arr});
-  }
-}
-
 void TestOp(const OpAttrs &attrs, VerifyFunc verify_fn) {
   std::vector<NDArray*> inputs(attrs.num_inputs);
   std::vector<NDArray*> outputs(attrs.num_outputs);
@@ -834,4 +794,44 @@ TEST(IMPERATIVE, SumBackwardsOp) {
   TestOp(attrs, VerifySumBackwardsResult);
 }
 
+TEST(MKLDNN_BASE, MKLDNNSum) {
+  std::vector<NDArrayAttrs> in_arrs = GetTestInputArrays();
+  std::vector<NDArrayAttrs> in_arrs2 = GetTestInputArrays(true);
+  TestArrayShapes tas = GetTestArrayShapes();
+  std::vector<mkldnn::memory::primitive_desc> pds = tas.pds;
+
+  for (int i = 0; i < in_arrs.size(); i++) {
+    auto in_arr = in_arrs[i];
+    auto in_arr2 = in_arrs2[i];
+    std::vector<NDArrayAttrs> out_arrs = GetTestOutputArrays(in_arr.arr.shape(), pds);
+    if (!SupportMKLDNN(in_arr.arr) || !in_arr.arr.IsMKLDNNData() || in_arr.arr.IsView())
+      continue;
+
+    for (auto out_arr : out_arrs) {
+      auto in_mem1 = in_arr.arr.GetMKLDNNData();
+      auto in_mem2 = in_arr2.arr.GetMKLDNNData();
+      auto out_mem = out_arr.arr.GetMKLDNNData(in_mem1->get_primitive_desc());
+
+      // TODO(alexzai) : remove this noop when by reordering in MKLDNNSum
+      if (out_mem == nullptr)
+        continue;
+      PrintVerifyMsg(in_arr, in_arr);
+      op::MKLDNNSum(*in_mem1, *in_mem2, *out_mem);
+      MKLDNNStream::Get()->Submit();
+      VerifySumResult({&in_arr.arr, &in_arr2.arr}, {&out_arr.arr});
+    }
+
+    // in place
+    auto input_mem = in_arr.arr.GetMKLDNNData();
+    auto input_mem2 = in_arr2.arr.GetMKLDNNData();
+    NDArrayAttrs orig_arr(in_arr.arr.Copy(in_arr.arr.ctx()), "In Place Copy");
+    PrintVerifyMsg(orig_arr, in_arr);
+    InitMKLDNNArray(&orig_arr.arr, input_mem->get_primitive_desc());
+    orig_arr.arr.CopyFrom(*input_mem);
+    op::MKLDNNSum(*input_mem, *input_mem2, *input_mem);
+    MKLDNNStream::Get()->Submit();
+    VerifySumResult({&orig_arr.arr, &in_arr2.arr}, {&in_arr.arr});
+  }
+}
+
 #endif
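
For reference, the element-wise checks that the verify helpers in this series perform reduce to a few lines of scalar math: ReLU forward keeps only positive inputs, ReLU backward masks the output gradient by (input > 0), and the backward of an element-wise add copies the output gradient to both input gradients (which is also what the FallBackCompute path added for the backward add produces). The following is a standalone sketch of that reference math only; it is not part of any patch, and the file name and array values are made up for the example.

// verify_reference_math.cc -- standalone sketch of the element-wise checks
// encoded by the MKLDNN test verify functions (illustrative values only).
#include <cassert>
#include <cstddef>
#include <vector>

int main() {
  std::vector<float> in       = {-2.f, -1.f, 0.f, 1.f, 2.f};  // forward input
  std::vector<float> out_grad = { 5.f,  4.f, 3.f, 2.f, 1.f};  // gradient w.r.t. output

  // Expected values, computed by hand for the data above.
  std::vector<float> relu_out     = {0.f, 0.f, 0.f, 1.f, 2.f};  // max(in, 0)
  std::vector<float> relu_in_grad = {0.f, 0.f, 0.f, 2.f, 1.f};  // (in > 0) * out_grad

  for (size_t i = 0; i < in.size(); i++) {
    // ReLU forward: out = max(in, 0).
    float out = in[i] > 0.f ? in[i] : 0.f;
    assert(out == relu_out[i]);

    // ReLU backward: the gradient passes through only where the forward
    // input was positive, d_in = (in > 0) * d_out.
    float in_grad = (in[i] > 0.f ? 1.f : 0.f) * out_grad[i];
    assert(in_grad == relu_in_grad[i]);

    // Backward of element-wise add: the same output gradient is copied to
    // both input gradients, d_a = d_b = d_out.
    float d_a = out_grad[i];
    float d_b = out_grad[i];
    assert(d_a == d_b && d_b == out_grad[i]);
  }
  return 0;
}

Keeping the reference math this small is what allows a single TestOp driver to cover both forward and backward operators, with only the verify function changing per test.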
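
The verifydefmem fix works because the initializer and the checker must agree on the same shifted sequence: once the default initializer writes data[i] = i - size/2 (so arrays contain negative as well as positive values for the activation tests), the memory check has to expect i - shift rather than i. Below is a minimal standalone sketch of that writer/reader pair; the function names are placeholders for illustration, not the helpers used in mkldnn.cc.

// shifted_init_sketch.cc -- standalone sketch of the shifted init/verify
// pattern; function names are placeholders, values are illustrative.
#include <cassert>
#include <cstddef>
#include <vector>

// Writer: fill the buffer with i - size/2, giving negative then positive values.
static void init_shifted(std::vector<float> *data) {
  const int size = static_cast<int>(data->size());
  const int shift = size >> 1;
  for (int i = 0; i < size; i++)
    (*data)[i] = static_cast<float>(i - shift);
}

// Reader: count how many elements match the same shifted sequence;
// every element must match for the check to pass.
static bool verify_shifted(const std::vector<float> &data) {
  const int size = static_cast<int>(data.size());
  const int shift = size >> 1;
  std::size_t num_same = 0;
  for (int i = 0; i < size; i++)
    num_same += data[i] == static_cast<float>(i - shift);
  return num_same == data.size();
}

int main() {
  std::vector<float> buf(10);
  init_shifted(&buf);
  assert(verify_shifted(buf));  // writer and reader use the same shift
  return 0;
}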