[WebNN EP] Fix issues of GRU operator #22123

Open · wants to merge 2 commits into base: main
2 changes: 1 addition & 1 deletion onnxruntime/core/providers/webnn/builders/helper.h
@@ -184,7 +184,7 @@ static const InlinedHashMap<std::string, std::string> op_map = {
{"GlobalLpPool", "l2Pool2d"},
{"Greater", "greater"},
{"GreaterOrEqual", "greaterOrEqual"},
{"Gru", "gru"},
{"GRU", "gru"},
{"HardSigmoid", "hardSigmoid"},
{"HardSwish", "hardSwish"},
{"Identity", "identity"},
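The substance of this one-line fix is the map key's casing: Node::OpType() returns the operator's canonical ONNX name, which the ONNX spec spells "GRU", so the old "Gru" entry could never match and GRU nodes were rejected as unsupported by the WebNN EP. A minimal standalone sketch of the failed lookup (hypothetical main(), not code from this PR):

#include <iostream>
#include <string>
#include <unordered_map>

int main() {
  // Plays the role of op_map in helper.h: ONNX op type -> WebNN op name.
  const std::unordered_map<std::string, std::string> op_map = {
      {"GRU", "gru"},  // fixed key; matches the ONNX spec's spelling
  };
  // Node::OpType() for a GRU node yields the canonical string "GRU".
  const std::string op_type = "GRU";
  auto it = op_map.find(op_type);
  // With the old key "Gru", find() missed and the node was treated
  // as unsupported by the WebNN EP.
  std::cout << (it != op_map.end() ? it->second : "<unsupported>") << "\n";
}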
41 changes: 32 additions & 9 deletions onnxruntime/core/providers/webnn/builders/impl/gru_op_builder.cc
@@ -28,6 +28,8 @@ class GruOpBuilder : public BaseOpBuilder {
const WebnnDeviceType /*device_type*/, const logging::Logger& logger) const override;
bool HasSupportedInputsImpl(const Node& node, const emscripten::val& wnn_limits,
const logging::Logger& logger) const override;
bool HasSupportedOutputsImpl(const Node& node, const emscripten::val& wnn_limits,
const logging::Logger& logger) const override;
};

void GruOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const Node& node) const {
@@ -191,30 +193,24 @@ bool GruOpBuilder::HasSupportedInputsImpl(const Node& node, const emscripten::val&
const auto& op_type = node.OpType();
int32_t input0_type = 0; // input data type
int32_t input1_type = 0; // weight data type
- int32_t input2_type = 0; // recurrentWeight data type
+ int32_t input2_type = 0; // recurrent weight data type
int32_t input3_type = 0; // bias data type
int32_t input4_type = 0; // recurrentBias data type
- int32_t input5_type = 0; // initialHiddenState data type
+ int32_t input5_type = 0; // initial hidden state data type
bool has_input3 = input_defs.size() > 3 && input_defs[3]->Exists();
bool has_input4 = input_defs.size() > 4 && input_defs[4]->Exists();
bool has_input5 = input_defs.size() > 5 && input_defs[5]->Exists();

if (!GetType(*input_defs[0], input0_type, logger) ||
!GetType(*input_defs[1], input1_type, logger) ||
!GetType(*input_defs[2], input2_type, logger) ||
(has_input3 && !GetType(*input_defs[3], input3_type, logger)) ||
(has_input4 && !GetType(*input_defs[4], input4_type, logger)) ||
(has_input5 && !GetType(*input_defs[5], input5_type, logger))) {
return false;
}

- InlinedVector<int32_t, 6> input_types = {input0_type, input1_type, input2_type};
+ InlinedVector<int32_t, 5> input_types = {input0_type, input1_type, input2_type};
if (has_input3) {
input_types.push_back(input3_type);
}
if (has_input4) {
input_types.push_back(input4_type);
}
if (has_input5) {
input_types.push_back(input5_type);
}
@@ -225,6 +221,33 @@ bool GruOpBuilder::HasSupportedInputsImpl(const Node& node, const emscripten::val&
return IsDataTypeSupportedByOp(op_type, input0_type, wnn_limits, "input", "X", logger);
}
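HasSupportedInputsImpl gathers the element types of the three required operands (input, weight, recurrent weight) plus whichever optional operands (bias, recurrent bias, initial hidden state) are present, and the collapsed lines of this hunk then, by all appearances, require the gathered types to agree before checking that single type against the WebNN limits for "X". A small sketch of that homogeneity check under this assumption (hypothetical helper name, not code from this PR):

#include <algorithm>
#include <cstdint>
#include <vector>

// Returns true when every collected ONNX tensor element type matches the
// first one; an empty list is trivially homogeneous.
bool AllSameType(const std::vector<int32_t>& types) {
  if (types.empty()) return true;
  return std::all_of(types.begin(), types.end(),
                     [first = types.front()](int32_t t) { return t == first; });
}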

bool LstmOpBuilder::HasSupportedOutputsImpl(const Node& node,
Contributor (suggested change):
- bool LstmOpBuilder::HasSupportedOutputsImpl(const Node& node,
+ bool GruOpBuilder::HasSupportedOutputsImpl(const Node& node,

const emscripten::val& wnn_limits,
const logging::Logger& logger) const {
const auto& output_defs = node.OutputDefs();
const auto& op_type = node.OpType();
int32_t Y_type = 0;
int32_t Y_h_type = 0;
bool has_Y = output_defs.size() > 0 && output_defs[0]->Exists();
bool has_Y_h = output_defs.size() > 1 && output_defs[1]->Exists();

if (has_Y && !has_Y_h && GetType(*output_defs[0], Y_type, logger)) {
return IsDataTypeSupportedByOp(op_type, Y_type, wnn_limits, "outputs", "Y", logger);
Contributor: It may return early if this returns true.

}
if (!has_Y && has_Y_h && GetType(*output_defs[1], Y_h_type, logger)) {
return IsDataTypeSupportedByOp(op_type, Y_h_type, wnn_limits, "outputs", "Y_h", logger);
}
if (has_Y && has_Y_h && GetType(*output_defs[0], Y_type, logger) && GetType(*output_defs[1], Y_h_type, logger)) {
if (Y_type != Y_h_type) {
LOGS(logger, VERBOSE) << "[GRU] Output data types must be the same.";
return false;
}
return IsDataTypeSupportedByOp(op_type, Y_type, wnn_limits, "outputs", "Y", logger);
}

return false;
}
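ONNX GRU declares two optional outputs: Y (the hidden state for every time step) and Y_h (the final hidden state). The new override accepts any of the three present/absent combinations, insists that Y and Y_h share a data type when both are requested, and only then defers to the WebNN limits check. Here is the branching in isolation, with hypothetical std::optional dtypes standing in for NodeArg plus GetType, and the limits check stubbed to true (a sketch, not code from this PR):

#include <cstdint>
#include <iostream>
#include <optional>

bool GruOutputsSupported(std::optional<int32_t> y,    // Y: all hidden states
                         std::optional<int32_t> y_h)  // Y_h: final hidden state
{
  if (y && !y_h) return true;       // only Y: validate Y's dtype against limits
  if (!y && y_h) return true;       // only Y_h: validate Y_h's dtype
  if (y && y_h) return *y == *y_h;  // both: dtypes must match before validating
  return false;                     // neither output requested
}

int main() {
  std::cout << GruOutputsSupported(1, 1) << "\n";             // 1: same dtypes
  std::cout << GruOutputsSupported(1, 10) << "\n";            // 0: mismatch
  std::cout << GruOutputsSupported(std::nullopt, 1) << "\n";  // 1: Y_h only
}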

void CreateGruOpBuilder(const std::string& op_type, OpBuilderRegistrations& op_registrations) {
op_registrations.builders.push_back(std::make_unique<GruOpBuilder>());
op_registrations.op_builder_map.emplace(op_type, op_registrations.builders.back().get());