Commit 1543eeb

init

Superjomn committed Aug 16, 2017
1 parent 9eaef75
Showing 4 changed files with 52 additions and 8 deletions.
7 changes: 7 additions & 0 deletions paddle/framework/pybind.cc
@@ -275,6 +275,13 @@ All parameter, weight, gradient are variables in Paddle.
                     const std::shared_ptr<operators::NetOp> &net) -> void {
                self.set_stepnet(net);
              });
+
+  rnn.def("backward", [](const operators::RecurrentOp &forwardOp,
+                         const std::unordered_set<std::string> &no_grad_vars) {
+    const auto &op = *static_cast<const OperatorBase *>(&forwardOp);
+    return Backward(op, no_grad_vars);
+  });
+
ExposeOperator(rnn);

m.def("unique_integer", UniqueIntegerGenerator);
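For orientation, the binding added above is what the new test in this commit drives from Python. A minimal sketch of the call (assuming a fully configured `forward_op`, built as in RecurrentGradientOpTest further down; the empty set means no variables are excluded from the gradient computation):

import paddle.v2.framework.core as core

# `forward_op` stands in for a core.RecurrentOp whose stepnet has been set,
# as constructed in RecurrentGradientOpTest below.
backward_op = core.RecurrentOp.backward(forward_op, set())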
1 change: 0 additions & 1 deletion paddle/operators/recurrent_op.cc
@@ -77,7 +77,6 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope) const {
  // Now all variables in scope must be created outside of op.
  PADDLE_ENFORCE_NOT_NULL(stepnet_);
  PADDLE_ENFORCE(!(*stepnet_)->Outputs().empty(), "stepnet_ op has no outputs");
-  PADDLE_ENFORCE(!(*stepnet_)->Outputs().empty(), "net_op has no outputs");

  if (seq_len_ > step_scopes->size()) {
    for (size_t i = step_scopes->size(); i < seq_len_; ++i) {
12 changes: 6 additions & 6 deletions python/paddle/v2/framework/tests/gradient_checker.py
@@ -29,13 +29,13 @@ def get_numeric_gradient(op,
                         local_scope=None):
    """
    Get Numeric Gradient for an operator's input.

-    :param op: C++ operator instance, could be an network
-    :param input_values: The input variables. Should be an dictionary, key is
-    variable name. Value is numpy array.
-    :param output_name: The final output variable name.
+    :param op: C++ operator instance; it can also be a network.
+    :param input_values: The input variables. Should be a dictionary whose keys
+        are variable names and whose values are numpy arrays.
+    :param output_name: The final output variable name.
     :param input_to_check: The input variable whose gradient is to be checked.
-    :param delta: The perturbation value for numeric gradient method. The
+    :param delta: The perturbation value for the numeric gradient method. The
     smaller delta is, the more accurate the result, but a delta that is too
     small can run into numerical stability problems.
     :param local_scope: The local scope used for get_numeric_gradient.
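To make the role of delta concrete, here is a minimal, self-contained sketch of the central-difference scheme that numeric gradient checking rests on (plain numpy; `numeric_gradient` and `f` are illustrative names, not the actual internals of gradient_checker.py):

import numpy as np

def numeric_gradient(f, x, delta=1e-5):
    # Perturb each element of x by +/-delta and apply the central
    # difference (f(x + d) - f(x - d)) / (2 * d).
    grad = np.zeros_like(x)
    for idx in np.ndindex(*x.shape):
        orig = x[idx]
        x[idx] = orig + delta
        pos = f(x)
        x[idx] = orig - delta
        neg = f(x)
        x[idx] = orig
        grad[idx] = (pos - neg) / (2.0 * delta)
    return grad

# Example: d/dx sum(x^2) = 2x, so the numeric gradient of ones((2, 2))
# should be close to 2.0 everywhere.
g = numeric_gradient(lambda v: float(np.sum(v ** 2)), np.ones((2, 2)))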
40 changes: 39 additions & 1 deletion python/paddle/v2/framework/tests/test_recurrent_op.py
@@ -3,6 +3,7 @@
import unittest
import numpy as np
from paddle.v2.framework.op import Operator, RecurrentOp
+from gradient_checker import GradientChecker


def py_sigmoid(x):
@@ -69,7 +70,7 @@ def create_tensor(scope, name, shape, np_data):
    return tensor


-class TestRecurrentOp(unittest.TestCase):
+class RecurrentOpTest(unittest.TestCase):
    '''
    Test RNNOp
@@ -164,5 +165,42 @@ def test_forward(self):
        self.assertEqual(pd_output.shape, py_output.shape)

+
+class RecurrentGradientOpTest(unittest.TestCase):
+    def create_forward_op(self):
+        self.forward_op = RecurrentOp(
+            # inputs
+            inlinks=["x"],
+            boot_memories=["h_boot"],
+            step_net="stepnet",
+            # outputs
+            outlinks=["h"],
+            step_scopes="step_scopes",
+            # attributes
+            inlink_alias=["x@alias"],
+            outlink_alias=["h@alias"],
+            pre_memories=["h@pre"],
+            memories=["h@alias"])
+
+        # create a stepnet for RNN
+        stepnet = core.Net.create()
+        x_fc_op = Operator("mul", X="x@alias", Y="W", Out="Wx")
+        h_fc_op = Operator("mul", X="h@pre", Y="U", Out="Uh")
+        sum_op = Operator("add_two", X="Wx", Y="Uh", Out="sum")
+        sig_op = Operator("sigmoid", X="sum", Y="h@alias")
+
+        for op in [x_fc_op, h_fc_op, sum_op, sig_op]:
+            stepnet.add_op(op)
+        stepnet.complete_add_op(True)
+        self.forward_op.set_stepnet(stepnet)
+
+    def create_gradient_op(self):
+        a = set()
+        backward_op = core.RecurrentOp.backward(self.forward_op, a)
+
+    def test_grad(self):
+        self.create_forward_op()
+        self.create_gradient_op()

if __name__ == '__main__':
    unittest.main()
