dygraph Prim code gen (node.cc) #33

Merged · 24 commits · Jan 6, 2023
Changes from 4 commits
@@ -47,8 +47,8 @@ add_custom_target(
           ${forwards_cc_path}
   COMMAND ${CMAKE_COMMAND} -E copy_if_different ${tmp_forwards_h_path}
           ${forwards_h_path}
-  # COMMAND ${CMAKE_COMMAND} -E copy_if_different ${tmp_nodes_cc_path}
-  # ${nodes_cc_path}
+  COMMAND ${CMAKE_COMMAND} -E copy_if_different ${tmp_nodes_cc_path}
+          ${nodes_cc_path}
   COMMAND ${CMAKE_COMMAND} -E copy_if_different ${tmp_nodes_h_path}
           ${nodes_h_path}
   VERBATIM)

Owner commented: remove additional generated files
25 changes: 25 additions & 0 deletions paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py
@@ -403,6 +403,23 @@ def ParseYamlInplaceInfo(string):
     return inplace_map


+def ParseYamlCompositeInfo(string):
+    # example composite: fun(args1, args2,.....)
+    fname = r'(.*?)'
+    wspace = r'\s*'
+    fargs = r'(.*?)'
+    pattern = (
+        fr'{fname}{wspace}\({wspace}{fargs}{wspace}\)'
+    )
+
+    m = re.search(pattern, string)
+    composite_fun_info = []
+    composite_fun_info.append(m.group(1))
+    func_args = m.group(2).split(",")
+    for fun_arg in func_args:
+        composite_fun_info.append(fun_arg.strip())
+
+    return composite_fun_info
 ####################
 # Generator Base #
 ####################
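
As a quick sanity check, here is what the new parser returns when fed the `composite` entry this PR adds to backward.yaml (a sketch assuming `ParseYamlCompositeInfo` and the module's `re` import are in scope):

```python
# Feed the parser the tanh_grad entry added to backward.yaml in this PR:
#   composite : tanh_grad(out, out_grad, x_grad)
info = ParseYamlCompositeInfo("tanh_grad(out, out_grad, x_grad)")
print(info)  # ['tanh_grad', 'out', 'out_grad', 'x_grad']
```

The first element is the composite function name; the rest are the stripped argument names, matching the [func_name, input_name, ...] layout documented in __init__ below.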
@@ -438,6 +455,7 @@ def __init__(self, forward_api_contents, namespace):
         # Special Op Attributes
         self.optional_inputs = []  # [name, ...]
         self.no_need_buffers = []  # [name, ...]
+        self.composite_func_info = []  # [func_name, input_name, ...]
         self.intermediate_outputs = []  # [name, ...]
         self.forward_inplace_map = {}  # {name : name, ...}
@@ -459,6 +477,13 @@ def ParseNoNeedBuffer(self):
             name = RemoveSpecialSymbolsInName(name)
             self.no_need_buffers.append(name.strip())

+    def ParseComposite(self):
+        grad_api_contents = self.grad_api_contents
+
+        if 'composite' in grad_api_contents.keys():
+            composite_str = grad_api_contents['composite']
+            self.composite_func_info = ParseYamlCompositeInfo(composite_str)
+
     def ParseDispensable(self):
         forward_api_contents = self.forward_api_contents

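For context on how the parsed info is consumed, a minimal sketch of the flag derivation (the dict contents here are hypothetical; the real values come from the parsed yaml):

```python
# ParseComposite's two paths, outside the class plumbing.
composite_func_info = []  # the default set in __init__

grad_api_contents = {'composite': 'tanh_grad(out, out_grad, x_grad)'}
if 'composite' in grad_api_contents.keys():
    composite_func_info = ParseYamlCompositeInfo(grad_api_contents['composite'])

# eager_gen.py below derives its flags from exactly this default:
is_composite_grad_api = False if composite_func_info == [] else True
# True here; without a 'composite' key the list stays [] and the flag is False.
```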
33 changes: 32 additions & 1 deletion paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
@@ -332,6 +332,9 @@ class {} : public egr::GradNodeBase {{
#include "paddle/fluid/eager/nan_inf_utils.h"
#include "paddle/phi/api/include/sparse_api.h"
#include "paddle/fluid/eager/api/manual/eager_manual/nodes/nodes.h"
#include "paddle/fluid/prim/api/manual/backward/composite_backward_api.h"
#include "paddle/fluid/prim/api/all.h"
#include "paddle/fluid/prim/utils/utils.h"
DECLARE_bool(check_nan_inf);
{}
"""
@@ -546,6 +549,7 @@ def __init__(
         # self.forward_outputs_position_map
         # self.optional_inputs
         # self.no_need_buffers
+        # self.composite_func_info
         # self.intermediate_outputs
         # self.forward_inplace_map
         FunctionGeneratorBase.__init__(self, forward_api_contents, namespace)
@@ -871,6 +875,7 @@ def GenerateNodeCreationCodes(self, for_backward=False):
         backward_grad_outputs_map = self.backward_grad_outputs_map
         backward_attrs_list = self.backward_attrs_list
         optional_inputs = self.optional_inputs
+        is_composite_forward_api = False if self.composite_func_info == [] else True

         # Pass Stop Gradient Args
         pass_stop_gradient_args_str = self.GetPassStopGradientArgsList(
@@ -1056,6 +1061,8 @@ def run(self):
         self.ParseBackwardInplaceInfo()
         # Parse no_need_buffer
         self.ParseNoNeedBuffer()
+        # Parse composite
+        self.ParseComposite()

         # Parse optional_inputs
         self.ParseDispensable()
@@ -1826,16 +1833,19 @@ def GenerateHigherOrderNodeCreationCode(self):
         is_invoke_forward_api = IsInvokeForwardApi(
             self.grad_api_contents, self.forward_apis_dict
         )
+        is_composite_forward_api = False if self.composite_func_info == [] else True
Owner commented: why composite forward api?

Author replied: replaced by composite grad api

         if next_node_generator is not None:
             has_higher_order_node = True
             return (
                 has_higher_order_node,
                 is_invoke_forward_api,
+                is_composite_forward_api,
                 next_grad_node_creation_str,
                 next_grad_node_out_list,
                 next_node_generator.backward_forward_inputs_map,
             )
-        elif not is_invoke_forward_api:
+        elif not is_invoke_forward_api and not is_composite_forward_api:
Owner commented: what if it hits the else branch?

Author replied: if is_invoke_forward_api or is_composite_grad_api, next_grad_node_creation_str should be None; we will add this when Flags_prim_enabled is deleted. A TODO is added here.
             next_grad_node_creation_str = f""" if(trace_backward) {{
     PADDLE_THROW(phi::errors::Unavailable(
     \"The Op {self.backward_api_name} doesn't have any grad\"
@@ -1845,6 +1855,7 @@ def GenerateHigherOrderNodeCreationCode(self):
         return (
             has_higher_order_node,
             is_invoke_forward_api,
+            is_composite_forward_api,
             next_grad_node_creation_str,
             next_grad_node_out_list,
             None,
@@ -1942,13 +1953,15 @@ def GenerateNodeDefinition(
         self,
         has_higher_order_node,
         is_invoke_forward_api,
+        is_composite_grad_api,
         next_grad_node_creation_str,
         next_grad_node_out_list,
         backward_forward_inputs_map_next,
     ):
         namespace = self.namespace
         forward_api_name = self.forward_api_name
         backward_api_name = self.backward_api_name
+        composite_backward_api_name = self.composite_func_info[0] if is_composite_grad_api else None
         backward_forward_inputs_map = self.backward_forward_inputs_map
         backward_grad_inputs_map = self.backward_grad_inputs_map
         backward_grad_outputs_map = self.backward_grad_outputs_map
@@ -2133,6 +2146,7 @@ def GenerateNodeDefinition(
         # Grad Function Call String
         slot_num_bwd_outputs = len(self.forward_inputs_position_map.keys())
         grad_api_namespace = f"paddle::experimental::{namespace}"
+        composite_grad_api_namespace = f"paddle::prim::{namespace}"
         grad_function_prepare_str = f"""
   const auto& out_metas = OutputMeta();
   paddle::small_vector<std::vector<paddle::experimental::Tensor>, egr::kSlotSmallVectorSize> returns({slot_num_bwd_outputs});
@@ -2203,6 +2217,8 @@ def GenerateNodeDefinition(
}}"""

grad_api_args_str = ", ".join(grad_api_args)
composite_grad_api_args_str = ", ".join(grad_api_args)
composite_template_name = "<paddle::experimental::Tensor>"

if is_invoke_forward_api:
autograd_api_out = "auto"
@@ -2225,6 +2241,16 @@ def GenerateNodeDefinition(
 {out_assign_str}}} else {{
 {indent}{autograd_api_out} api_output = paddle::experimental::{self.namespace}{self.grad_api_contents['invoke']};
 {out_assign_str}{indent}}}
 """
+        elif is_composite_grad_api:
Owner commented: Leave a TODO here to indicate the strategy that will be used later, such as using composite only when we don't have a backward kernel.

Owner commented: This should be statically generated here.

Author replied: done
grad_function_call_str = f"""
if (paddle::prim::PrimCommonUtils::IsPrimEnabled()) {{
{indent}{composite_grad_api_namespace}{composite_backward_api_name}{composite_template_name}({composite_grad_api_args_str});
VLOG(4) << paddle::string::Sprintf("composite api %s is called" , "{composite_backward_api_name}");
}}else{{
{indent}{grad_api_namespace}{backward_api_name}({grad_api_args_str});
VLOG(4) << paddle::string::Sprintf("origin api %s is called" , "{backward_api_name}");
}}
"""
         else:
             grad_function_call_str = f"""
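
To make the new branch concrete, here is a standalone sketch of the same f-string substitution for the tanh_grad node (the argument string and the empty namespace are assumptions; the real `grad_api_args` are assembled earlier in GenerateNodeDefinition):

```python
# Hypothetical placeholder values for the tanh_grad backward node.
namespace = ""  # non-sparse ops are assumed to use an empty namespace here
indent = "  "
composite_grad_api_namespace = f"paddle::prim::{namespace}"
composite_backward_api_name = "tanh_grad"
composite_template_name = "<paddle::experimental::Tensor>"
composite_grad_api_args_str = "out, grad_out, api_output_0"  # assumed names

print(f"""
  if (paddle::prim::PrimCommonUtils::IsPrimEnabled()) {{
{indent}{composite_grad_api_namespace}{composite_backward_api_name}{composite_template_name}({composite_grad_api_args_str});
  VLOG(4) << paddle::string::Sprintf("composite api %s is called" , "{composite_backward_api_name}");
  }}else{{ ... }}""")
# The guarded call expands to:
#   paddle::prim::tanh_grad<paddle::experimental::Tensor>(out, grad_out, api_output_0);
```

The runtime `IsPrimEnabled()` switch is what lets a built binary flip between the composite decomposition and the original backward kernel without regenerating node.cc.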
@@ -2328,6 +2354,9 @@ def GenerateNodeDefinition(
             var_str += f"\n{indent} output_str += output_{new_name}_str; "

         log_str = AFTER_LOG_PRINT_TEMPLATE.format(var_str)
+        # TODO Ruting modify in the future
Owner commented: TODO with wrong format

Author replied: modified
+        # if is_composite_forward_api:
+        #     next_grad_node_creation_str = ''

         self.node_definition_str = GRAD_FUNCTION_TEMPLATE.format(
             grad_node_name,
@@ -2361,6 +2390,7 @@ def run(self):
         (
             has_higher_order_node,
             is_invoke_forward_api,
+            is_composite_grad_api,
             next_grad_node_creation_str,
             next_grad_node_out_list,
             backward_forward_inputs_map,
@@ -2371,6 +2401,7 @@
         self.GenerateNodeDefinition(
             has_higher_order_node,
             is_invoke_forward_api,
+            is_composite_grad_api,
             next_grad_node_creation_str,
             next_grad_node_out_list,
             backward_forward_inputs_map,
1 change: 1 addition & 0 deletions paddle/fluid/operators/generator/parse_utils.py
@@ -293,6 +293,7 @@ def check_op_config(op_entry, op_name):
         'intermediate',
         'no_need_buffer',
         'data_transform',
+        'composite'
     )
     infer_meta_key_set = ('func', 'param')
     kernel_key_set = ('func', 'param', 'data_type', 'layout', 'backend')
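
A small sketch of why this one-word change is needed (a simplified stand-in for the validator; the real check_op_config also validates the infer_meta and kernel key sets above):

```python
# Simplified key validation in the spirit of check_op_config.
base_key_set = ('backward', 'invoke', 'inplace', 'composite')  # 'composite' now legal

def check_keys(op_entry, op_name):
    for key in op_entry:
        assert key in base_key_set, f"Op ({op_name}): invalid key ({key})"

# Without 'composite' in the allowed set, the backward.yaml entry below
# would be rejected during code generation.
check_keys({'composite': 'tanh_grad(out, out_grad, x_grad)'}, 'tanh_grad')
```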
1 change: 1 addition & 0 deletions paddle/phi/api/yaml/backward.yaml
@@ -1259,6 +1259,7 @@
     param : [out]
   kernel :
     func : tanh_grad
+  composite : tanh_grad(out, out_grad, x_grad)
   backward : tanh_double_grad
   inplace : (out_grad -> x_grad)

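For reference, the decomposition such a composite rule expresses follows from d tanh(x)/dx = 1 - tanh(x)^2, so the backward needs only the forward output `out` (matching `param : [out]` above). A plain-Python sketch; the real composite_backward_api.h implements this with prim ops in C++:

```python
import math

def tanh_grad_composite(out, out_grad):
    # x_grad = out_grad * (1 - out^2), with out = tanh(x) saved by the forward pass
    return out_grad * (1.0 - out * out)

# Quick numeric check against a central difference:
x, eps = 0.5, 1e-6
out = math.tanh(x)
numeric = (math.tanh(x + eps) - math.tanh(x - eps)) / (2 * eps)
assert abs(tanh_grad_composite(out, 1.0) - numeric) < 1e-8
```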