Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions js/web/docs/webnn-operators.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ operators and the supported opset domain/versions in **WebNN EP** by ONNX Runtim
| ScatterElements | ai.onnx(11-12, 13-15, 16-17, 18+) | scatterElements | ✗ | ✓ | Only supports 'reduction' == 'none' |
| ScatterND | ai.onnx(11-12, 13-15, 16-17, 18+) | scatterND | ✗ | ✓ | Only supports 'reduction' == 'none' |
| Shape | ai.onnx(7-12, 13-14, 15-18, 19-20, 21+) | slice | ✓ | ✓ | |
| Sigmoid | ai.onnx(7-12, 13+) | sigmoid | ✓ | ✓ | |
| SimplifiedLayerNormalization | ai.onnx(1+) | pow + reduceMean + add + sqrt + div + mul | ✓ | ✓ | |
| Softplus | ai.onnx(7+) | softplus | ✓ | ✓ | |
| Softsign | ai.onnx(7+) | softsign | ✓ | ✓ | |
Expand Down
1 change: 1 addition & 0 deletions onnxruntime/core/providers/webnn/builders/helper.h
Original file line number Diff line number Diff line change
Expand Up @@ -268,6 +268,7 @@ static const InlinedHashMap<std::string, std::string> op_map = {
{"ScatterND", "scatterND"},
{"Shape", "slice"},
{"Sigmoid", "sigmoid"},
{"SimplifiedLayerNormalization", "layerNormalization"},
{"Softplus", "softplus"},
{"Softsign", "softsign"},
{"Sin", "sin"},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ Status NormalizationOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder
}

NodeAttrHelper helper(node);
options.set("epsilon", helper.Get("epsilon", 1e-05f));
const auto epsilon = helper.Get("epsilon", 1e-05f);
options.set("epsilon", epsilon);

emscripten::val output = emscripten::val::undefined();
if (op_type == "BatchNormalization") {
Expand All @@ -84,14 +85,71 @@ Status NormalizationOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder
}

output = model_builder.GetBuilder().call<emscripten::val>("batchNormalization", input, mean, variance, options);
} else if (op_type == "LayerNormalization") {
} else if (op_type == "LayerNormalization" || op_type == "SimplifiedLayerNormalization") {
int64_t axis = helper.Get("axis", -1);
axis = HandleNegativeAxis(axis, rank);
std::vector<uint32_t> axes(rank - SafeInt<uint32_t>(axis));
std::iota(axes.begin(), axes.end(), axis);

options.set("axes", emscripten::val::array(axes));
output = model_builder.GetBuilder().call<emscripten::val>("layerNormalization", input, options);
if (op_type == "LayerNormalization") {
options.set("axes", emscripten::val::array(axes));
output = model_builder.GetBuilder().call<emscripten::val>("layerNormalization", input, options);
} else { // SimplifiedLayerNormalization
/**
WebNN doesn't support SimplifiedLayerNormalization. So decompose it into a series of ops:
X --> Pow --> ReduceMean --> Add --> Sqrt --> Div -> Mul
^ ^ ^ ^ ^
| | | | |
Y:2 axis B:epsilon A:X A:scale
*/

int32_t input_type;
ORT_RETURN_IF_NOT(GetType(*input_defs[0], input_type, logger), "Cannot get input type");
emscripten::val common_options = emscripten::val::object();

// Pow
emscripten::val pow_constant_desc = emscripten::val::object();
ORT_RETURN_IF_NOT(SetWebnnDataType(pow_constant_desc, input_type), "Unsupported data type");
pow_constant_desc.set("shape", emscripten::val::array());
emscripten::val pow_buffer = emscripten::val::global("Float32Array").new_(1);
pow_buffer.set(0, 2);
emscripten::val pow_constant =
model_builder.GetBuilder().call<emscripten::val>("constant", pow_constant_desc, pow_buffer);
common_options.set("label", node.Name() + "_pow");
emscripten::val pow =
model_builder.GetBuilder().call<emscripten::val>("pow", input, pow_constant, common_options);

// ReduceMean
emscripten::val reduce_options = emscripten::val::object();
reduce_options.set("axes", emscripten::val::array(axes));
reduce_options.set("keepDimensions", true);
reduce_options.set("label", node.Name() + "_reduceMean");
emscripten::val reduce_mean = model_builder.GetBuilder().call<emscripten::val>("reduceMean", pow, reduce_options);

// Add
emscripten::val add_constant_desc = emscripten::val::object();
ORT_RETURN_IF_NOT(SetWebnnDataType(add_constant_desc, input_type), "Unsupported data type");
add_constant_desc.set("shape", emscripten::val::array());
emscripten::val add_buffer = emscripten::val::global("Float32Array").new_(1);
add_buffer.set(0, epsilon);
emscripten::val add_constant =
model_builder.GetBuilder().call<emscripten::val>("constant", add_constant_desc, add_buffer);
common_options.set("label", node.Name() + "_add");
emscripten::val add =
model_builder.GetBuilder().call<emscripten::val>("add", reduce_mean, add_constant, common_options);

// Sqrt
common_options.set("label", node.Name() + "_sqrt");
emscripten::val sqrt = model_builder.GetBuilder().call<emscripten::val>("sqrt", add, common_options);

// Div
common_options.set("label", node.Name() + "_div");
emscripten::val div = model_builder.GetBuilder().call<emscripten::val>("div", input, sqrt, common_options);

// Mul
common_options.set("label", node.Name() + "_mul");
output = model_builder.GetBuilder().call<emscripten::val>("mul", scale, div, common_options);
}
} else if (op_type == "InstanceNormalization") {
// WebNN spec only supports 4D input for instanceNormalization.
// Supports 3D input by prepending 1 size dimension.
Expand Down Expand Up @@ -229,6 +287,7 @@ void CreateNormalizationOpBuilder(const std::string& op_type, OpBuilderRegistrat
"BatchNormalization",
"InstanceNormalization",
"LayerNormalization",
"SimplifiedLayerNormalization",
};

op_registrations.builders.push_back(std::make_unique<NormalizationOpBuilder>());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,7 @@ static OpBuilderRegistrations CreateOpBuilderRegistrations() {
CreateNormalizationOpBuilder("BatchNormalization", op_registrations);
CreateNormalizationOpBuilder("InstanceNormalization", op_registrations);
CreateNormalizationOpBuilder("LayerNormalization", op_registrations);
CreateNormalizationOpBuilder("SimplifiedLayerNormalization", op_registrations);
}

{ // Pad
Expand Down
Loading