Added Eager Dygraph support for user_defined_grads (#39309)
jim19930609 authored Jan 28, 2022
1 parent 75923a3 commit 76103c8
Showing 5 changed files with 48 additions and 8 deletions.
21 changes: 16 additions & 5 deletions python/paddle/fluid/tests/unittests/op_test.py
@@ -30,6 +30,7 @@
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
+from paddle.fluid.framework import _in_eager_mode
 from paddle.fluid.framework import _test_eager_guard
 from paddle.fluid.backward import append_backward
 from paddle.fluid.op import Operator
@@ -1831,11 +1832,21 @@ def _get_dygraph_grad(self,
             for no_grad_val in no_grad_set:
                 del (inputs[no_grad_val])
 
-            grad_inputs = paddle.grad(
-                outputs=fluid.layers.utils.flatten(outputs),
-                inputs=fluid.layers.utils.flatten(inputs),
-                grad_outputs=grad_outputs)
-            return [grad.numpy() for grad in grad_inputs]
+            if _in_eager_mode():
+                core.eager.run_backward(
+                    fluid.layers.utils.flatten(outputs), grad_outputs,
+                    False)
+                grad_inputs = []
+                for inputs_list in inputs.values():
+                    for inp in inputs_list:
+                        grad_inputs.append(inp.grad.numpy())
+                return grad_inputs
+            else:
+                grad_inputs = paddle.grad(
+                    outputs=fluid.layers.utils.flatten(outputs),
+                    inputs=fluid.layers.utils.flatten(inputs),
+                    grad_outputs=grad_outputs)
+                return [grad.numpy() for grad in grad_inputs]
 
     @staticmethod
     def _numpy_to_lod_tensor(np_value, lod, place):
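Note: the new branch reads gradients off the input tensors after core.eager.run_backward(), where the legacy branch got them back from paddle.grad(). A minimal standalone sketch of the two paths (assuming a Paddle build from this era, where the private helpers _in_eager_mode and _test_eager_guard are importable from paddle.fluid.framework, and using the public y.backward() in place of core.eager.run_backward()):

    import numpy as np
    import paddle
    from paddle.fluid.framework import _in_eager_mode, _test_eager_guard

    def sum_of_squares_grad(x):
        # Hypothetical helper: returns dL/dx for L = sum(x * x), choosing
        # the gradient path the way _get_dygraph_grad() now does.
        y = (x * x).sum()
        if _in_eager_mode():
            # Eager dygraph: run backward, then read .grad off the input.
            y.backward()
            return x.grad.numpy()
        else:
            # Legacy dygraph: paddle.grad() returns the gradient tensors.
            return paddle.grad(outputs=[y], inputs=[x])[0].numpy()

    paddle.disable_static()
    x = paddle.to_tensor(np.ones(3, "float32"), stop_gradient=False)
    print(sum_of_squares_grad(x))      # legacy path: [2. 2. 2.]
    with _test_eager_guard():
        x = paddle.to_tensor(np.ones(3, "float32"), stop_gradient=False)
        print(sum_of_squares_grad(x))  # eager path: [2. 2. 2.]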
6 changes: 6 additions & 0 deletions python/paddle/fluid/tests/unittests/test_diag_v2.py
@@ -21,6 +21,7 @@
 import paddle.fluid as fluid
 from paddle.fluid import core
 from paddle.fluid import Program, program_guard
+from paddle.fluid.framework import _test_eager_guard
 
 
 class TestDiagV2Op(OpTest):
@@ -239,6 +240,9 @@ def run_static(self, use_gpu=False):
     def test_cpu(self):
         paddle.disable_static(place=paddle.fluid.CPUPlace())
         self.run_imperative()
+        with _test_eager_guard():
+            self.run_imperative()
+
         paddle.enable_static()
 
         with fluid.program_guard(fluid.Program()):
@@ -250,6 +254,8 @@ def test_gpu(self):

         paddle.disable_static(place=paddle.fluid.CUDAPlace(0))
         self.run_imperative()
+        with _test_eager_guard():
+            self.run_imperative()
         paddle.enable_static()
 
         with fluid.program_guard(fluid.Program()):
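Note: this file shows the pattern repeated across the commit: each existing imperative test body runs once in legacy dygraph and once more inside _test_eager_guard(). A sketch of the pattern in isolation (the test class and the op under test are hypothetical stand-ins, not from this commit):

    import unittest
    import numpy as np
    import paddle
    from paddle.fluid.framework import _test_eager_guard

    class ApiTestSketch(unittest.TestCase):
        def run_imperative(self):
            x = paddle.to_tensor(np.array([1.5, -2.0, 0.0], "float32"))
            np.testing.assert_allclose(paddle.abs(x).numpy(), [1.5, 2.0, 0.0])

        def test_cpu(self):
            paddle.disable_static(place=paddle.fluid.CPUPlace())
            self.run_imperative()       # legacy dygraph
            with _test_eager_guard():
                self.run_imperative()   # identical body, eager dygraph
            paddle.enable_static()

    if __name__ == '__main__':
        unittest.main()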
12 changes: 9 additions & 3 deletions python/paddle/fluid/tests/unittests/test_diagonal_op.py
@@ -22,6 +22,7 @@
 import paddle.fluid as fluid
 import paddle.fluid.core as core
 import paddle.tensor as tensor
+from paddle.fluid.framework import _test_eager_guard
 
 paddle.enable_static()
 
@@ -33,10 +34,10 @@ def setUp(self):
         self.outputs = {'Out': self.target}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)
 
     def test_check_grad(self):
-        self.check_grad(['Input'], 'Out')
+        self.check_grad(['Input'], 'Out', check_eager=True)
 
     def init_config(self):
         self.case = np.random.randn(10, 5, 2).astype('float64')
@@ -79,7 +80,8 @@ def test_check_grad(self):
             ['Input'],
             'Out',
             user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out])
+            user_defined_grad_outputs=[self.grad_out],
+            check_eager=True)
 
 
 class TestDiagonalOpCase3(TestDiagonalOp):
@@ -122,6 +124,10 @@ def test_api_dygraph(self):
         self.assertEqual(np.allclose(out.numpy(), out_ref, rtol=1e-08), True)
         paddle.enable_static()
 
+    def test_api_eager_dygraph(self):
+        with _test_eager_guard():
+            self.test_api_dygraph()
+
 
 if __name__ == '__main__':
     unittest.main()
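Note: TestDiagonalOpCase2 exercises exactly what the commit title promises: check_grad() with user_defined_grads now also runs under eager dygraph when check_eager=True is passed. A hedged sketch of such a test (the class name, shapes, attrs, and gradient values are illustrative, not from this commit; it assumes it is run from python/paddle/fluid/tests/unittests so that op_test is importable):

    import unittest
    import numpy as np
    import paddle
    from op_test import OpTest  # resolvable inside the Paddle unittests dir

    paddle.enable_static()

    class DiagonalGradSketch(OpTest):
        def setUp(self):
            self.op_type = 'diagonal'
            x = np.random.randn(4, 4).astype('float64')
            self.inputs = {'Input': x}
            self.attrs = {'offset': 0, 'axis1': 0, 'axis2': 1}
            self.outputs = {'Out': np.diagonal(x, 0, 0, 1)}
            # Hand-computed gradients: grad_out scatters back onto the
            # diagonal of Input; every off-diagonal entry gets zero.
            self.grad_out = np.ones(4, dtype='float64')
            self.grad_x = np.diag(self.grad_out)

        def test_check_grad(self):
            # check_eager=True re-runs this gradient check in eager dygraph,
            # going through the new user_defined_grads path in op_test.py.
            self.check_grad(
                ['Input'], 'Out',
                user_defined_grads=[self.grad_x],
                user_defined_grad_outputs=[self.grad_out],
                check_eager=True)

    if __name__ == '__main__':
        unittest.main()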
12 changes: 12 additions & 0 deletions python/paddle/fluid/tests/unittests/test_digamma_op.py
@@ -20,6 +20,7 @@
 import paddle.fluid as fluid
 import paddle.static as static
 from op_test import OpTest
+from paddle.fluid.framework import _test_eager_guard
 
 
 class TestDigammaOp(OpTest):
@@ -94,6 +95,10 @@ def test_in_dynamic_mode(self):
             res = paddle.digamma(input_t).numpy()
             self.assertEqual(np.allclose(res, sc_res, rtol=1e-05), True)
 
+    def test_in_eager_dynamic_mode(self):
+        with _test_eager_guard():
+            self.test_in_dynamic_mode()
+
     def test_name_argument(self):
         with static.program_guard(static.Program()):
             x = static.data(name="x", shape=self._shape, dtype=self.dtypes[0])
@@ -114,6 +119,13 @@ def test_dtype_error(self):
                 input_t = paddle.to_tensor(input)
                 res = paddle.digamma(input_t)
 
+        with self.assertRaises(RuntimeError):
+            with fluid.dygraph.guard():
+                with _test_eager_guard():
+                    input = np.random.random(self._shape).astype("int32")
+                    input_t = paddle.to_tensor(input)
+                    res = paddle.digamma(input_t)
+
 
 if __name__ == "__main__":
     unittest.main()
5 changes: 5 additions & 0 deletions python/paddle/fluid/tests/unittests/test_trunc_op.py
@@ -21,6 +21,7 @@
 import paddle.fluid.core as core
 import paddle.fluid as fluid
 from paddle.fluid import Program, program_guard
+from paddle.fluid.framework import _test_eager_guard
 
 paddle.enable_static()
 
@@ -78,6 +79,10 @@ def test_api_dygraph(self):
         self.assertEqual(np.allclose(out.numpy(), out_ref, rtol=1e-08), True)
         paddle.enable_static()
 
+    def test_api_eager_dygraph(self):
+        with _test_eager_guard():
+            self.test_api_dygraph()
+
     def test_errors(self):
         with paddle.static.program_guard(paddle.static.Program()):
             x = paddle.fluid.data('X', [20, 20], 'bool')
