-
Notifications
You must be signed in to change notification settings - Fork 1.6k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[NNAdapter][IntelOpenVINO] Init support for OpenVINO #8552
[NNAdapter][IntelOpenVINO] Init support for OpenVINO #8552
Conversation
Thanks for your contribution! |
40f9e35
to
da793e4
Compare
da793e4
to
73daabf
Compare
auto mean_node = converter->ConvertToOutputNode(mean_operand); | ||
auto variance_node = converter->ConvertToOutputNode(variance_operand); | ||
// Create <BatchNormInference> Node for Intel OpenVINO | ||
std::shared_ptr<Node> node = |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
auto batch_norm_op
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
auto beta_node = converter->ConvertToOutputNode(bias_operand); | ||
auto mean_node = converter->ConvertToOutputNode(mean_operand); | ||
auto variance_node = converter->ConvertToOutputNode(variance_operand); | ||
// Create <BatchNormInference> Node for Intel OpenVINO |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
这一类注释可以去掉
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
return output_node; | ||
} | ||
|
||
std::shared_ptr<OutputNode> Converter::ConvertToOutputNode( |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
直接用ConvertOperand就行了,不用强调output node
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
core::Operation* operation) { | ||
BATCH_NORMALIZATION_OPERATION_EXTRACT_INPUTS_OUTPUTS | ||
|
||
// Convert operand to Intel OpenVINO's OutputNode |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Convert to OpenVINO nodes
BATCH_NORMALIZATION_OPERATION_EXTRACT_INPUTS_OUTPUTS | ||
|
||
// Convert operand to Intel OpenVINO's OutputNode | ||
auto input_node = converter->GetMappedOutputNode(input_operand); |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
直接用GetMappedNode
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
未修改,OutputNode用来修饰方法,变量名用tensor代替
std::shared_ptr<ov::Core> ov_core_{nullptr}; | ||
std::map<core::Operand*, std::vector<std::shared_ptr<OutputNode>>> | ||
output_nodes_; | ||
std::vector<std::shared_ptr<default_opset::Parameter>> parameter_nodes_; |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
为啥parameter_nodes_,这里不加 ov_前缀? 是不是得统一下?我建议都不加
Context* context_{nullptr}; | ||
std::vector<NNAdapterOperandType> input_types_; | ||
std::vector<NNAdapterOperandType> output_types_; | ||
std::shared_ptr<ov::Core> ov_core_{nullptr}; |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
runtime_core_
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
output_nodes_; | ||
std::vector<std::shared_ptr<default_opset::Parameter>> parameter_nodes_; | ||
std::vector<std::shared_ptr<Node>> result_nodes_; | ||
std::shared_ptr<ov::CompiledModel> compiled_ov_model_{nullptr}; |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
compiled_model_
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
|
||
ElementType ConvertToOVElementType( | ||
const NNAdapterOperandPrecisionCode& precision_code) { | ||
std::map<NNAdapterOperandPrecisionCode, ElementType> precision_map{ |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
为啥要用map,用switch 不是更简单更快吗?
|
||
#define FUNCTION_ADD_CONST_OUTPUT_NODE_DEFINE(type, element_type) \ | ||
template <> \ | ||
std::shared_ptr<OutputNode> AddConstOutputNode<type>( \ |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
用CreateOVConstantNode或AddOVConstantNode?
UpdateOutputNodeMap(operand, output_node); | ||
return output_node; | ||
} | ||
NNADAPTER_LOG(FATAL) << "Only constant and model input operands can be " |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
直接用OpenVINO吧,不要加Intel了,有点显得多余
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
} | ||
return Shape(ov_shape); | ||
} | ||
// Add const ov::Node and return ov::Output<ov::Node> |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
为啥不直接在utility.h定义
// Convert C/C++ POD types to ElementType
template <typename T>
ElementType GetElementType() {
NNADAPTER_LOG(FATAL) << "Unable to convert " << typeid(T).name() << " to ElementType";
return ov::element::f32;
}
template <>
ElementType GetElementType();
template <>
ElementType GetElementType<int8_t>();
template <>
ElementType GetElementType<int16_t>();
....
template <typename T>
std::shared_ptr<OutputNode> AddConstOutputNode(
    std::vector<size_t> dimensions, std::vector<T> values) {
  auto const_node = std::make_shared<default_opset::Constant>(
      GetElementType<T>(), Shape(dimensions), values);
  return std::make_shared<OutputNode>(const_node->output(0));
}
ConvertElementwise, | ||
"huawei_ascend_npu,huawei_kirin_npu,imagination_nna,kunlunxin_xtcl"); | ||
"kunlunxin_xtcl,cambricon_mlu,android_nnapi,intel_openvino"); | ||
REGISTER_CONVERTER(elementwise_max, |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
这个文件好好检查下,不要误删其它硬件
558a882
to
8b5ea93
Compare
8b5ea93
to
2fae09d
Compare
REGISTER_CONVERTER(SUB, ConvertElementwise) | ||
REGISTER_CONVERTER(TANH, ConvertUnaryActivations) | ||
|
||
#endif // LITE_BACKENDS_NNADAPTER_NNADAPTER_SRC_DRIVER_INTEL_OPENVINO_CONVERTER_ALL_H_ |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
NNADAPTER_DRIVER_INTEL_OPENVINO_CONVERTER_ALL_H
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
int ConvertSoftmax(Converter* converter, core::Operation* operation) { | ||
SOFTMAX_OPERATION_EXTRACT_INPUTS_OUTPUTS | ||
|
||
// Convert operand to OpenVINO Tensor |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Tensor 第一个字母小写,其它地方都改一下
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
done
2fae09d
to
1d39f14
Compare
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
LGTM
./lite/tools/build_linux.sh --arch=x86 --with_nnadapter=ON --nnadapter_with_intel_openvino=ON --nnadapter_intel_openvino_sdk_root=${nnadapter_intel_openvino_sdk_root}