[Utils] infer_paddle_model_shape.py support paddlepaddle2.6 (#1214)
* update infer_paddle_model_shape.py
Zheng-Bicheng authored Mar 28, 2024
1 parent e60dc1a commit 5549423
Showing 2 changed files with 25 additions and 35 deletions.
11 changes: 5 additions & 6 deletions tools/paddle/README.md
@@ -7,7 +7,8 @@
python prune_paddle_model.py --model_dir original_paddle_model \
--model_filename model.pdmodel \
--params_filename model.pdiparams \
- --output_names unsqueeze2_0.tmp_0 unsqueeze2_0.tmp_0 \
+ --input_names input0 input1 \
+ --output_names output0 output1 \
--save_dir new_paddle
```
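As a side note, the tensor names passed to `--input_names` / `--output_names` can be looked up by loading the original model and printing its feed/fetch names. The sketch below is an illustration, not code from this diff; it assumes the files are `original_paddle_model/model.pdmodel` and `original_paddle_model/model.pdiparams`, matching the command above.
```
# Hedged sketch: list candidate names for --input_names / --output_names.
# Assumes the model prefix original_paddle_model/model from the command above.
import paddle
import paddle.static as static

paddle.enable_static()
exe = static.Executor(paddle.CPUPlace())
program, feed_names, fetch_vars = static.io.load_inference_model(
    "original_paddle_model/model", exe)
print("inputs :", feed_names)                    # candidates for --input_names
print("outputs:", [v.name for v in fetch_vars])  # candidates for --output_names
```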

@@ -20,11 +21,9 @@ python prune_paddle_model.py --model_dir original_paddle_model \

In fact, this model's structure can support dynamic-shape inputs. We re-export the model with the `paddle_infer_shape.py` script and then inspect it in Netron, where we can see that the new model's inputs and outputs have been updated.
```
- python paddle_infer_shape.py --model_dir ch_PP-OCRv2_det_infer/ \
- --model_filename inference.pdmodel \
- --params_filename inference.pdiparams \
- --save_dir new_model \
- --input_shape_dict="{'x':[-1,3,-1,-1]}"
+ python infer_paddle_model_shape.py --model_path ch_PP-OCRv2_det_infer/inference \
+ --save_path ch_PP-OCRv2_det_infer/new_inference \
+ --input_shape_dict="{'x':[-1,3,-1,-1]}"
```
![image-20220331165925526](imgs/new.png)
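Instead of opening the model in Netron, the recorded input shape can also be checked directly. A minimal sketch, assuming the re-exported prefix `ch_PP-OCRv2_det_infer/new_inference` produced by the command above:
```
# Hedged sketch: confirm the 'x' input now has dynamic dims (-1, 3, -1, -1).
import paddle
import paddle.static as static

paddle.enable_static()
exe = static.Executor(paddle.CPUPlace())
program, feed_names, fetch_vars = static.io.load_inference_model(
    "ch_PP-OCRv2_det_infer/new_inference", exe)
for name in feed_names:
    print(name, program.global_block().var(name).shape)
```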

49 changes: 20 additions & 29 deletions tools/paddle/infer_paddle_model_shape.py
@@ -1,4 +1,7 @@
import argparse
+ import paddle
+ import paddle.base as base
+ import paddle.static as static


def process_old_ops_desc(program):
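The new module-level imports are the core of the 2.6 support: the legacy `paddle.fluid` namespace has been retired in favor of `paddle.base` and `paddle.static`, so the script now imports those once at the top instead of importing `paddle.fluid` lazily inside functions. A small compatibility check, sketched here as an assumption about intent rather than code from the diff:
```
# Hedged sketch: fail early on paddle builds that predate the base/static APIs.
import paddle

major, minor = (int(v) for v in paddle.__version__.split(".")[:2])
if (major, minor) < (2, 6):
    raise RuntimeError("infer_paddle_model_shape.py targets PaddlePaddle >= 2.6 "
                       "(paddle.base / paddle.static); please upgrade.")
```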
@@ -9,9 +12,7 @@ def process_old_ops_desc(program):


def infer_shape(program, input_shape_dict):
- import paddle
paddle.enable_static()
- import paddle.fluid as fluid

OP_WITHOUT_KERNEL_SET = {
'feed', 'fetch', 'recurrent', 'go', 'rnn_memory_helper_grad',
@@ -45,43 +46,33 @@ def infer_shape(program, input_shape_dict):
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument(
- '--model_dir',
+ '--model_path',
required=True,
- help='Path of directory saved the input model.')
+ help='Directory path to input model + model name without suffix.')
parser.add_argument(
- '--model_filename', required=True, help='The input model file name.')
- parser.add_argument(
- '--params_filename', required=True, help='The parameters file name.')
+ '--input_shape_dict', required=True, help="The new shape information.")
parser.add_argument(
- '--save_dir',
+ '--save_path',
required=True,
- help='Path of directory to save the new exported model.')
- parser.add_argument(
- '--input_shape_dict', required=True, help="The new shape information.")
+ help='Directory path to save model + model name without suffix.')
return parser.parse_args()
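For clarity, `--input_shape_dict` is consumed in the `__main__` block below by `eval`-ing the command-line string into a plain dict mapping input names to shapes, with `-1` marking dynamic dimensions:
```
# Mirrors the eval() call in __main__ below (sketch only; value from the README).
input_shape_dict_str = "{'x':[-1,3,-1,-1]}"
input_shape_dict = eval(input_shape_dict_str)
assert input_shape_dict == {'x': [-1, 3, -1, -1]}
```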


if __name__ == '__main__':
args = parse_arguments()
- import paddle
paddle.enable_static()
- import paddle.fluid as fluid
input_shape_dict_str = args.input_shape_dict
input_shape_dict = eval(input_shape_dict_str)
print("Start to load paddle model...")
- exe = fluid.Executor(fluid.CPUPlace())
- [prog, ipts, outs] = fluid.io.load_inference_model(
- args.model_dir,
- exe,
- model_filename=args.model_filename,
- params_filename=args.params_filename)
- process_old_ops_desc(prog)
- infer_shape(prog, input_shape_dict)
- fluid.io.save_inference_model(
- args.save_dir,
- ipts,
- outs,
- exe,
- prog,
- model_filename=args.model_filename,
- params_filename=args.params_filename)
+ exe = base.Executor(paddle.CPUPlace())
+ [program, feed_target_names, fetch_targets] = static.io.load_inference_model(args.model_path, exe)
+ process_old_ops_desc(program)
+ infer_shape(program, input_shape_dict)
+
+ feed_vars = [program.global_block().var(name) for name in feed_target_names]
+ static.io.save_inference_model(
+ args.save_path,
+ feed_vars=feed_vars,
+ fetch_vars=fetch_targets,
+ executor=exe,
+ program=program)
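Taken together, the updated tool can be driven end to end without touching `paddle.fluid`. A hedged usage sketch, with paths taken from the README example above and assumed to exist locally:
```
# Hedged sketch: run the updated script and check the files it writes.
# static.io.save_inference_model emits <prefix>.pdmodel and <prefix>.pdiparams.
import pathlib
import subprocess

subprocess.run(
    ["python", "infer_paddle_model_shape.py",
     "--model_path", "ch_PP-OCRv2_det_infer/inference",
     "--save_path", "ch_PP-OCRv2_det_infer/new_inference",
     "--input_shape_dict", "{'x':[-1,3,-1,-1]}"],
    check=True,
)
for suffix in (".pdmodel", ".pdiparams"):
    path = pathlib.Path("ch_PP-OCRv2_det_infer/new_inference" + suffix)
    print(path, path.exists())
```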
