[static code gen] support composite grad maker code gen #37

Merged: 23 commits, Jan 8, 2023

Changes from 22 commits

Commits (23)
0863e6b
support static graph code-gen for squeeze op
zyfncg Dec 29, 2022
b54bd12
generate static graph code of unsqueeze
zyfncg Dec 29, 2022
bb4d8ec
refine op name
zyfncg Dec 29, 2022
6cd4f35
add extra output in op_compat
zyfncg Dec 29, 2022
bf1a2b1
remove debug log
zyfncg Dec 29, 2022
9ed243f
add composite parse
Charles-hit Jan 3, 2023
ca65aad
Merge commit 'refs/pull/49430/head' of /~https://github.com/PaddlePaddl…
Charles-hit Jan 3, 2023
bf9f920
support generate static graph code for imag and real op
zyfncg Jan 3, 2023
504d84b
Merge branch 'develop' of /~https://github.com/PaddlePaddle/Paddle into…
zyfncg Jan 4, 2023
5b22e09
Merge commit 'refs/pull/49523/head' of /~https://github.com/PaddlePaddl…
Charles-hit Jan 5, 2023
043bd4b
Merge branch 'prim_paddle' of /~https://github.com/JiabinYang/Paddle in…
Charles-hit Jan 6, 2023
a40e5d8
Merge branch 'prim_paddle' of /~https://github.com/JiabinYang/Paddle in…
Charles-hit Jan 6, 2023
44c902c
add composite code gen
Charles-hit Jan 7, 2023
09e0192
modify backward yaml
Charles-hit Jan 7, 2023
c97ebba
Merge branch 'develop' of /~https://github.com/PaddlePaddle/Paddle into…
Charles-hit Jan 7, 2023
3ef725d
Merge branch 'prim_paddle' of /~https://github.com/JiabinYang/Paddle in…
Charles-hit Jan 7, 2023
8ae8f0e
fix static composite grad maker code gen
Charles-hit Jan 7, 2023
742c18e
add some static funcs unit test
Charles-hit Jan 8, 2023
e5f11f5
Merge branch 'prim_paddle' of /~https://github.com/JiabinYang/Paddle in…
Charles-hit Jan 8, 2023
bd7eded
fix some bugs
Charles-hit Jan 8, 2023
0f2963d
Merge branch 'prim_paddle' of /~https://github.com/JiabinYang/Paddle in…
Charles-hit Jan 8, 2023
c2ff1a7
fix composite grad maker register code gen
Charles-hit Jan 8, 2023
caec995
optimize some functions
Charles-hit Jan 8, 2023
9,618 changes: 0 additions & 9,618 deletions paddle/fluid/operators/generated_op.cc

This file was deleted.

15 changes: 8 additions & 7 deletions paddle/fluid/operators/generator/CMakeLists.txt
@@ -148,15 +148,16 @@ if(${_result})
message(FATAL_ERROR "sparse operator codegen failed, exiting.")
endif()

# set(generated_static_files
# "${generated_op_path}"
# "${generated_static_op_path}"
# "${generated_sparse_ops_path}"
# "${generated_argument_mapping_path}"
# "${generated_static_argument_mapping_path}"
# "${generated_sparse_argument_mapping_path}")
set(generated_static_files
Review comment (Owner): why this? This is duplicated with line 159

"${generated_op_path}"
"${generated_static_op_path}"
"${generated_sparse_ops_path}"
"${generated_argument_mapping_path}"
"${generated_static_argument_mapping_path}"
"${generated_sparse_argument_mapping_path}")

set(generated_static_files
"${generated_op_path}"
"${generated_static_op_path}"
"${generated_sparse_ops_path}"
"${generated_argument_mapping_path}"
11 changes: 11 additions & 0 deletions paddle/fluid/operators/generator/filters.py
@@ -132,6 +132,17 @@ def to_int_array_tensors_name(attr):
return to_pascal_case(attr['name']) + 'TensorList'


def to_composite_grad_opmaker_name(backward_op_name):
words = backward_op_name.split("_")
for i in range(len(words)):
words[i] = words[i].strip()
words[i] = words[i].capitalize()
composite_grad_opmaker_name = words[0] + "Composite"
composite_grad_opmaker_name += "".join(word for word in words[1:])
composite_grad_opmaker_name += "OpMaker"
return composite_grad_opmaker_name


def cartesian_prod_attrs(attrs):
items = []
for attr in attrs:
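Note (illustrative, not part of the diff): the new to_composite_grad_opmaker_name filter turns a snake_case backward op name into the PascalCase name of the generated composite grad op maker class. A condensed sketch:

    # Condensed, behavior-equivalent form of the filter above.
    def to_composite_grad_opmaker_name(backward_op_name):
        words = [w.strip().capitalize() for w in backward_op_name.split("_")]
        return words[0] + "Composite" + "".join(words[1:]) + "OpMaker"

    assert to_composite_grad_opmaker_name("tanh_grad") == "TanhCompositeGradOpMaker"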
77 changes: 71 additions & 6 deletions paddle/fluid/operators/generator/generate_op.py
@@ -19,6 +19,7 @@
import yaml
from filters import (
cartesian_prod_mapping,
to_composite_grad_opmaker_name,
to_input_name,
to_int_array_tensor_name,
to_int_array_tensors_name,
@@ -32,6 +33,7 @@
from parse_utils import to_named_dict
from tests import (
is_base_op,
is_composite_op,
is_initializer_list,
is_scalar,
is_vec,
@@ -57,7 +59,9 @@
env.filters["to_input_name"] = to_input_name
env.filters["to_opmaker_name_cstr"] = to_opmaker_name_cstr
env.filters["cartesian_prod_mapping"] = cartesian_prod_mapping
env.filters["to_composite_grad_opmaker_name"] = to_composite_grad_opmaker_name
env.tests["base_op"] = is_base_op
env.tests["composite_op"] = is_composite_op
env.tests["vec"] = is_vec
env.tests["scalar"] = is_scalar
env.tests["initializer_list"] = is_initializer_list
@@ -153,6 +157,27 @@ def process_int_array(op_item, int_array_configs):
]


def parse_composite_info(ops, backward_ops, backward_op_dict):
for op in ops:
if "backward" in op:
op["phi_backward"] = op["backward"]
for backward_op in backward_ops:
if "backward" in backward_op:
backward_op["phi_backward"] = backward_op["backward"]
for backward_op_name, op_dict in backward_op_dict.items():
if "composite" not in op_dict:
continue
op_dict["composite"]["phi_inputs"] = []
op_dict["composite"]["phi_attrs"] = []
op_dict["composite"]["phi_outputs"] = []
for input in op_dict["inputs"]:
op_dict["composite"]["phi_inputs"].append(input['name'])
for attr in op_dict["attrs"]:
op_dict["composite"]["phi_attrs"].append(attr['name'])
for output in op_dict["outputs"]:
op_dict["composite"]["phi_outputs"].append(output['name'])


# replace name of op and params for OpMaker
def replace_compat_name(op_fluid_map_list, forward_op_dict, backward_op_dict):
def get_phi_and_fluid_op_name(op_item):
Expand All @@ -178,6 +203,37 @@ def update_grad_args_name(op_args, args_alias_map):
)
item['name'] = args_alias_map[item['name'][:-5]] + '_grad'

def add_fluid_info_in_composite(composite_map, args_alias_map):
fluid_input_list = []
fluid_attr_list = []
fluid_output_list = []
# add fluid op inputs
for input in composite_map["phi_inputs"]:
if input in args_alias_map:
fluid_input_list.append(args_alias_map[input])
else:
fluid_input_list.append(input)
# add fluid op attrs
for attr in composite_map["phi_attrs"]:
if attr in args_alias_map:
fluid_attr_list.append(args_alias_map[attr])
else:
fluid_attr_list.append(attr)
# add fluid op outputs
for output in composite_map["phi_outputs"]:
if output in args_alias_map:
fluid_output_list.append(args_alias_map[output])
else:
fluid_output_list.append(output)

composite_map.update(
{
"fluid_inputs": fluid_input_list,
"fluid_attrs": fluid_attr_list,
"fluid_outputs": fluid_output_list,
}
)

def get_param_list_alias(param_list, args_map):
return [
args_map[param] if param in args_map else param
@@ -307,6 +363,15 @@ def update_grad_op_compat_name(grad_op_item, args_name_map):
continue

backward_op_list = op_args['backward'].split(',')
# add fluid args name in composite map
for backward_op in backward_op_list:
if (
"composite"
in backward_op_dict[backward_op.split('(')[0].strip()]
):
add_fluid_info_in_composite(
backward_op_dict[backward_op]["composite"], args_map
)
_, bw_op_name = get_phi_and_fluid_op_name(backward_op_list[0])
forward_op_item['backward'] = bw_op_name
backward_op_item['op_name'] = bw_op_name
@@ -406,12 +471,10 @@ def main(
ops = yaml.safe_load(f)
ops = [restruct_io(op) for op in ops]
forward_op_dict = to_named_dict(ops)

with open(backward_yaml_path, "rt") as f:
backward_ops = yaml.safe_load(f)
backward_ops = [restruct_io(op) for op in backward_ops]
backward_op_dict = to_named_dict(backward_ops)

with open(op_version_yaml_path, "rt") as f:
op_versions = yaml.safe_load(f)
# add op version info into op
Expand All @@ -426,6 +489,8 @@ def main(
for bw_op in backward_ops:
bw_op['op_name'] = bw_op['name']

parse_composite_info(ops, backward_ops, backward_op_dict)

replace_compat_name(op_fluid_map_list, forward_op_dict, backward_op_dict)

# prepare for invoke case
@@ -442,21 +507,21 @@
op_dict = {}
op_dict.update(forward_op_dict)
op_dict.update(backward_op_dict)

if len(ops) == 0 and len(backward_ops) == 0:
if os.path.isfile(output_op_path):
os.remove(output_op_path)
if os.path.isfile(output_arg_map_path):
os.remove(output_arg_map_path)
return

op_template = env.get_template('op.c.j2')
with open(output_op_path, "wt") as f:
msg = op_template.render(
ops=ops, backward_ops=backward_ops, op_dict=op_dict
ops=ops,
backward_ops=backward_ops,
op_dict=op_dict,
composite_gen_flag=True,
Review comment (Owner): Add TODO here to support all static code gen with composite
)
f.write(msg)

ks_template = env.get_template('ks.c.j2')
with open(output_arg_map_path, 'wt') as f:
msg = ks_template.render(ops=ops, backward_ops=backward_ops)
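Note (illustrative sketch, not part of the diff): parse_composite_info records each composite op's phi-side argument names, and add_fluid_info_in_composite then maps them through the op_compat alias map to the legacy fluid names, falling back to the phi name when no alias exists. With hypothetical names:

    # Hypothetical composite entry and alias map, for illustration only.
    composite_map = {
        "phi_inputs": ["x", "out_grad"],
        "phi_attrs": ["axis"],
        "phi_outputs": ["x_grad"],
    }
    args_alias_map = {"x": "X", "out_grad": "Out@GRAD"}

    # Equivalent to the three per-list loops in add_fluid_info_in_composite.
    for phi_key, fluid_key in [("phi_inputs", "fluid_inputs"),
                               ("phi_attrs", "fluid_attrs"),
                               ("phi_outputs", "fluid_outputs")]:
        composite_map[fluid_key] = [args_alias_map.get(n, n) for n in composite_map[phi_key]]

    # composite_map["fluid_inputs"] == ["X", "Out@GRAD"]
    # composite_map["fluid_outputs"] == ["x_grad"]  (no alias, passes through)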
7 changes: 6 additions & 1 deletion paddle/fluid/operators/generator/generate_sparse_op.py
@@ -19,6 +19,7 @@
import yaml
from filters import (
cartesian_prod_mapping,
to_composite_grad_opmaker_name,
to_input_name,
to_int_array_tensor_name,
to_int_array_tensors_name,
@@ -58,6 +59,7 @@
env.filters["to_input_name"] = to_input_name
env.filters["to_opmaker_name_cstr"] = to_opmaker_name_cstr
env.filters["cartesian_prod_mapping"] = cartesian_prod_mapping
env.filters["to_composite_grad_opmaker_name"] = to_composite_grad_opmaker_name
env.tests["base_op"] = is_base_op
env.tests["vec"] = is_vec
env.tests["scalar"] = is_scalar
@@ -134,7 +136,10 @@ def main(op_yaml_path, backward_yaml_path, output_op_path, output_arg_map_path):
op_template = env.get_template('sparse_op.c.j2')
with open(output_op_path, "wt") as f:
msg = op_template.render(
ops=ops, backward_ops=backward_ops, op_dict=op_dict
ops=ops,
backward_ops=backward_ops,
op_dict=op_dict,
composite_gen_flag=False,
)
f.write(msg)

7 changes: 6 additions & 1 deletion paddle/fluid/operators/generator/generate_static_op.py
@@ -19,6 +19,7 @@
import yaml
from filters import (
cartesian_prod_mapping,
to_composite_grad_opmaker_name,
to_input_name,
to_int_array_tensor_name,
to_int_array_tensors_name,
@@ -58,6 +59,7 @@
env.filters["to_input_name"] = to_input_name
env.filters["to_opmaker_name_cstr"] = to_opmaker_name_cstr
env.filters["cartesian_prod_mapping"] = cartesian_prod_mapping
env.filters["to_composite_grad_opmaker_name"] = to_composite_grad_opmaker_name
env.tests["base_op"] = is_base_op
env.tests["vec"] = is_vec
env.tests["scalar"] = is_scalar
@@ -111,7 +113,10 @@ def main(
op_template = env.get_template('op.c.j2')
with open(output_op_path, "wt") as f:
msg = op_template.render(
ops=ops, backward_ops=[], op_dict=forward_op_dict
ops=ops,
backward_ops=[],
op_dict=forward_op_dict,
composite_gen_flag=False,
)
f.write(msg)

28 changes: 26 additions & 2 deletions paddle/fluid/operators/generator/parse_utils.py
@@ -289,6 +289,26 @@ def parse_forward(op_name: str, forward_config: str) -> Dict[str, Any]:
return forward_cfg


def parse_composite(
op_name: str,
composite_config: str,
) -> Dict[str, Any]:
# composite_config: func(args1, args2,.....)
fname = r'(.*?)'
wspace = r'\s*'
fargs = r'(.*?)'
pattern = fr'{fname}{wspace}\({wspace}{fargs}{wspace}\)'

m = re.search(pattern, composite_config)
func_name = m.group(1)
func_args = m.group(2)

composite_dict = {}
composite_dict["func_name"] = func_name
composite_dict["func_args"] = func_args
return composite_dict


def check_op_config(op_entry, op_name):
base_key_set = (
'op',
@@ -332,9 +352,9 @@ def parse_op_entry(op_entry: Dict[str, Any], name_field="op"):
op_name = op_entry[name_field]
inputs, attrs = parse_input_and_attr(op_name, op_entry["args"])
outputs = parse_outputs(op_name, op_entry["output"])

if "composite" in op_entry:
composite_dict = parse_composite(op_name, op_entry["composite"])
check_op_config(op_entry, op_name)

# validate default value of DataType and DataLayout
for attr in attrs:
if "default_value" in attr:
@@ -442,6 +462,10 @@ def parse_op_entry(op_entry: Dict[str, Any], name_field="op"):
invoke = parse_invoke(op_name, op_entry["invoke"])
op["invoke"] = invoke

# has composite ?
if "composite" in op_entry:
op.update({"composite": composite_dict})

# backward
if "backward" in op_entry:
backward = op_entry["backward"]
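Note (illustrative, not part of the diff): parse_composite splits a composite entry of the form func(arg1, arg2, ...) into the function name and the raw argument string. With a hypothetical config:

    import re

    # Same pattern that parse_composite builds above; the input string is hypothetical.
    composite_config = "tanh_double_grad(out, grad_out, grad_x_grad)"
    m = re.search(r'(.*?)\s*\(\s*(.*?)\s*\)', composite_config)
    assert m.group(1) == "tanh_double_grad"
    assert m.group(2) == "out, grad_out, grad_x_grad"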
22 changes: 15 additions & 7 deletions paddle/fluid/operators/generator/templates/op.c.j2
@@ -1,17 +1,20 @@
{% from "operator_utils.c.j2" import op_maker, backward_op_maker, backward_op_reused_maker, operator, register_op_with_components, register_op_version %}
{% from "operator_utils.c.j2" import op_maker, backward_op_maker, backward_op_reused_maker, operator, register_op_with_components, register_op_version, composite_grad_op_maker %}
// this file is generated by paddle/phi/api/yaml/generator/generate_op.py, do not edit.
#include <string>
#include "paddle/fluid/framework/convert_utils.h"
#include "paddle/fluid/framework/infershape_utils.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/op_version_registry.h"
#include "paddle/fluid/framework/convert_utils.h"
#include "paddle/fluid/prim/api/manual/backward/composite_backward_api.h"
#include "paddle/fluid/prim/utils/static/composite_grad_desc_maker.h"
#include "paddle/fluid/prim/utils/static/desc_tensor.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/infermeta/nullary.h"
#include "paddle/phi/infermeta/unary.h"
#include "paddle/phi/infermeta/backward.h"
#include "paddle/phi/infermeta/binary.h"
#include "paddle/phi/infermeta/ternary.h"
#include "paddle/phi/infermeta/multiary.h"
#include "paddle/phi/infermeta/backward.h"
#include "paddle/phi/infermeta/nullary.h"
#include "paddle/phi/infermeta/ternary.h"
#include "paddle/phi/infermeta/unary.h"

namespace paddle {
namespace operators {
@@ -36,14 +39,19 @@ using paddle::framework::GradVarName;
{% else %}
{{backward_op_reused_maker(op, op_dict[op["forward"]["name"]], op["invoke"])}}
{% endif %}
{% if composite_gen_flag == True %}
{% if op is composite_op %}
{{composite_grad_op_maker(op_dict[op["name"]])}}
{% endif %}
{% endif %}
{% endfor %}
} // namespace operators
} // namespace paddle

namespace ops = paddle::operators;
{% for op in ops + backward_ops %}
{% if op is base_op %}
{{register_op_with_components(op)}}
{{register_op_with_components(op, op_dict)}}
{{register_op_version(op)}}
{% endif %}
{% endfor %}
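Note (a minimal stand-in, not the real op.c.j2): composite_gen_flag gates the new composite_grad_op_maker macro, so only generate_op.py (which passes True) emits composite grad makers, and only for backward ops whose YAML entry carries a composite key (the real template checks this via the is_composite_op test). Assuming jinja2 is available:

    from jinja2 import Template

    # Stand-in for the gating logic; the op entries here are hypothetical.
    tpl = Template(
        "{% if composite_gen_flag %}"
        "{% for op in backward_ops %}"
        "{% if 'composite' in op %}emit composite maker for {{ op['op_name'] }}\n{% endif %}"
        "{% endfor %}"
        "{% endif %}"
    )
    out = tpl.render(
        composite_gen_flag=True,
        backward_ops=[{"op_name": "tanh_grad", "composite": {}},
                      {"op_name": "relu_grad"}],
    )
    print(out)  # -> emit composite maker for tanh_grad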