Merge pull request #96 from belgraviton/mxnet_parser_prelu_support
Mxnet parser prelu support
kitstar authored Mar 7, 2018
2 parents e66cd5c + 9cb14df commit 78322e1
Showing 1 changed file with 8 additions and 3 deletions.
11 changes: 8 additions & 3 deletions mmdnn/conversion/mxnet/mxnet_parser.py
@@ -809,22 +809,27 @@ def rename_Embedding(self, source_node):
         self.set_output_shape(source_node, IR_node)
 
 
-    # IR only support elu from {'elu', 'leaky', 'prelu', 'rrelu'}
+    # IR only support elu and prelu from {'elu', 'leaky', 'prelu', 'rrelu'}
     def rename_LeakyReLU(self, source_node):
         # judge whether meaningful
         assert "attr"
         # attr
         layer_attr = self._get_layer_attr(source_node)
 
         if "act_type" in layer_attr:
-            if not layer_attr["act_type"] == "elu":
+            if not layer_attr["act_type"] == "elu" and not layer_attr["act_type"] == "prelu":
                 print("Warning: Activation Type %s is not supported yet." % layer_attr["act_type"])
                 # return
 
         IR_node = self.IR_graph.node.add()
 
         # name, op
-        self._copy_and_reop(source_node, IR_node, "Elu")
+        if layer_attr['act_type'] == 'prelu':
+            self._copy_and_reop(source_node, IR_node, "PRelu")
+            # gamma
+            self.set_weight(source_node.name, "gamma", self.weight_data.get(source_node.name + "_gamma").asnumpy())
+        else:   # All other cases set to 'Elu'
+            self._copy_and_reop(source_node, IR_node, "Elu")
 
         # input edge
         self.convert_inedge(source_node, IR_node)
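For reference, a minimal sketch (not part of this PR) of an MXNet symbol that would exercise the new 'prelu' branch in rename_LeakyReLU. The layer names 'fc1' and 'act1' are illustrative; MXNet stores the learnable slope of a prelu LeakyReLU under '<layer name>_gamma', which the parser copies into the IR node as the 'gamma' weight.

import mxnet as mx

data = mx.sym.Variable('data')
fc = mx.sym.FullyConnected(data=data, num_hidden=64, name='fc1')
# act_type='prelu' adds a learnable slope parameter named 'act1_gamma'
act = mx.sym.LeakyReLU(data=fc, act_type='prelu', name='act1')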
