[WebNN EP] Fix issues of GRU operator #22123

Open · wants to merge 2 commits into main
Changes from 1 commit
29 changes: 29 additions & 0 deletions onnxruntime/core/providers/webnn/builders/impl/gru_op_builder.cc
@@ -28,6 +28,8 @@ class GruOpBuilder : public BaseOpBuilder {
const WebnnDeviceType /*device_type*/, const logging::Logger& logger) const override;
bool HasSupportedInputsImpl(const Node& node, const emscripten::val& wnn_limits,
const logging::Logger& logger) const override;
+  bool HasSupportedOutputsImpl(const Node& node, const emscripten::val& wnn_limits,
+                               const logging::Logger& logger) const override;
};

void GruOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const Node& node) const {
@@ -219,6 +221,33 @@ bool GruOpBuilder::HasSupportedInputsImpl(const Node& node, const emscripten::va
return IsDataTypeSupportedByOp(op_type, input0_type, wnn_limits, "input", "X", logger);
}

+bool LstmOpBuilder::HasSupportedOutputsImpl(const Node& node,
[Review comment, Contributor] Suggested change:
-bool LstmOpBuilder::HasSupportedOutputsImpl(const Node& node,
+bool GruOpBuilder::HasSupportedOutputsImpl(const Node& node,
+                                            const emscripten::val& wnn_limits,
+                                            const logging::Logger& logger) const {
+  const auto& output_defs = node.OutputDefs();
+  const auto& op_type = node.OpType();
+  int32_t Y_type = 0;
+  int32_t Y_h_type = 0;
+  bool has_Y = output_defs.size() > 0 && output_defs[0]->Exists();
+  bool has_Y_h = output_defs.size() > 1 && output_defs[1]->Exists();
+
+  if (has_Y && !has_Y_h && GetType(*output_defs[0], Y_type, logger)) {
+    return IsDataTypeSupportedByOp(op_type, Y_type, wnn_limits, "outputs", "Y", logger);
[Review comment, Contributor]
It may return early if this returns True.
+  }
+  if (!has_Y && has_Y_h && GetType(*output_defs[1], Y_h_type, logger)) {
+    return IsDataTypeSupportedByOp(op_type, Y_h_type, wnn_limits, "outputs", "Y_h", logger);
+  }
+  if (has_Y && has_Y_h && GetType(*output_defs[0], Y_type, logger) && GetType(*output_defs[1], Y_h_type, logger)) {
+    if (Y_type != Y_h_type) {
+      LOGS(logger, VERBOSE) << "[GRU] Output data types must be the same.";
+      return false;
+    }
+    return IsDataTypeSupportedByOp(op_type, Y_type, wnn_limits, "outputs", "Y", logger);
+  }
+
+  return false;
+}

void CreateGruOpBuilder(const std::string& op_type, OpBuilderRegistrations& op_registrations) {
op_registrations.builders.push_back(std::make_unique<GruOpBuilder>());
op_registrations.op_builder_map.emplace(op_type, op_registrations.builders.back().get());
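For readers following the branch logic in the added function: all three if blocks enforce one rule, namely that every output which is actually present (Y, Y_h, or both) must carry a single, supported data type. Below is a minimal standalone sketch of that rule. It is not the PR's code: OutputDef, IsFloat32, and HasSupportedOutputs are hypothetical stand-ins for the WebNN EP's NodeArg, GetType, and IsDataTypeSupportedByOp machinery. The loop form also returns early on the first type mismatch, in the spirit of the second review comment above.

// Standalone sketch (C++17). OutputDef and IsFloat32 are made-up stand-ins,
// not onnxruntime types; they only model "an optional output with a type code".
#include <cstdint>
#include <iostream>
#include <optional>
#include <vector>

struct OutputDef {
  bool exists = false;  // does the optional ONNX output (Y, Y_h) exist?
  int32_t type = 0;     // ONNX tensor element type code
};

// Stand-in for IsDataTypeSupportedByOp: pretend only float32 (code 1) passes.
bool IsFloat32(int32_t type) { return type == 1; }

// All present outputs must share one supported data type.
bool HasSupportedOutputs(const std::vector<OutputDef>& outputs) {
  std::optional<int32_t> common_type;
  for (const auto& out : outputs) {
    if (!out.exists) continue;
    if (common_type && *common_type != out.type) {
      std::cout << "[GRU] Output data types must be the same.\n";
      return false;  // early return on the first mismatch
    }
    common_type = out.type;
  }
  // At least one output must be present, and its shared type supported.
  return common_type.has_value() && IsFloat32(*common_type);
}

int main() {
  std::cout << HasSupportedOutputs({{true, 1}, {true, 1}}) << "\n";   // 1: Y and Y_h agree
  std::cout << HasSupportedOutputs({{true, 1}, {true, 11}}) << "\n";  // 0: mismatched types
  std::cout << HasSupportedOutputs({{false, 0}, {true, 1}}) << "\n";  // 1: only Y_h present
}

Collapsing the per-output branches into a loop would also keep the check correct if an operator later gained more optional outputs, though the PR's explicit three-branch form maps more directly onto the GRU spec's Y and Y_h outputs.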