RustNN WPT Conformance

Date: Wed, 18 Feb 2026 17:32:16 GMT | Duration: 27.15s

RustNN commit: 35a46b245ce6

Total       2337
Pass        2221
Fail        0
Skip        116
Pass Rate   95.04%
Files       108
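
(Pass Rate is Pass / Total: 2221 / 2337 ≈ 95.04%. Since Fail is 0, Skip follows as Total − Pass = 116, which matches the 115 op-level skips plus the one invalid dequantizeLinear case below; the file-level parse-error placeholders evidently do not count toward the totals.)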

l2Pool2d.https.any.js

0 passed 28 skipped
Skipped (28)
Test | Variant | Reason
l2Pool2d float32 4D constant tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor all negative default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.windowDimensions | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.padding | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.strides | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.dilations | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.layout=nchw | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.layout=nhwc | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.outputSizes ignores options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.outputSizes ignores options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.dilations with options.strides | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D constant tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor all negative default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.windowDimensions | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.padding | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.strides | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.dilations | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.layout=nchw | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.layout=nhwc | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.outputSizes ignores options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.outputSizes ignores options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.dilations with options.strides | cpu | unimplemented op(s): l2_pool2d
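
For reference, l2Pool2d is L2-norm pooling: each output element is the square root of the sum of squares of the input elements under its window. A minimal sketch of the kind of graph these variants build, assuming the WebNN MLGraphBuilder API; the context setup, operand name, and shape here are illustrative, not taken from the tests:

    // Build a tiny graph applying 2x2 L2 pooling with stride 2.
    const context = await navigator.ml.createContext({deviceType: 'cpu'});
    const builder = new MLGraphBuilder(context);
    const x = builder.input('x', {dataType: 'float32', shape: [1, 2, 4, 4]}); // nchw by default
    const y = builder.l2Pool2d(x, {windowDimensions: [2, 2], strides: [2, 2]});

windowDimensions, padding, strides, dilations, layout, roundingType and outputSizes are all fields of the same MLPool2dOptions dictionary, so one missing l2_pool2d kernel accounts for all 28 skips.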

lstm.https.any.js

0 passed 28 skipped
Skipped (28)
Test | Variant | Reason
lstm float32 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialCellState | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with all options | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2, batchSize=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2 with all options | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2 with bidirections | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialCellState | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with all options | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=2 with all options | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=2 with bidirections | cpu | unimplemented op(s): lstm
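
Every lstm variant funnels through one builder call. A minimal sketch, assuming the WebNN lstm(input, weight, recurrentWeight, steps, hiddenSize, options) signature and a builder constructed as in the l2Pool2d sketch above; the sizes are illustrative:

    // LSTM stacks its four gates, hence the 4 * hiddenSize weight rows.
    const steps = 2, batch = 1, inputSize = 3, hiddenSize = 4, dirs = 1;
    const seq = builder.input('seq', {dataType: 'float32', shape: [steps, batch, inputSize]});
    const w = builder.input('w', {dataType: 'float32', shape: [dirs, 4 * hiddenSize, inputSize]});
    const r = builder.input('r', {dataType: 'float32', shape: [dirs, 4 * hiddenSize, hiddenSize]});
    const [hidden, cell] = builder.lstm(seq, w, r, steps, hiddenSize,
        {direction: 'forward', layout: 'iofg', returnSequence: false});

With returnSequence: true the call yields a third output carrying the hidden state at every step, which is what the returnSequence=true variants check.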

gru.https.any.js

0 passed 24 skipped
Skipped (24)
Test | Variant | Reason
gru float32 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] and reset_after=true | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.layout='rzn' | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 all options | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.direction='both' and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with all options | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] and resetAfter=true | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.layout='rzn' | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 all options | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.direction='both' and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with all options | cpu | unimplemented op(s): gru
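
gru mirrors lstm with three gates and no cell state, so its weights carry 3 * hiddenSize rows and it returns only hidden-state outputs. Under the same assumptions as the lstm sketch above (reusing seq, steps, hiddenSize and the size constants):

    // gru(input, weight, recurrentWeight, steps, hiddenSize, options)
    const w3 = builder.input('w3', {dataType: 'float32', shape: [dirs, 3 * hiddenSize, inputSize]});
    const r3 = builder.input('r3', {dataType: 'float32', shape: [dirs, 3 * hiddenSize, hiddenSize]});
    const [h] = builder.gru(seq, w3, r3, steps, hiddenSize, {resetAfter: true, layout: 'zrn'});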

lstm_cell.https.any.js

0 passed 10 skipped
Skipped (10)
Test | Variant | Reason
lstmCell float32 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with all options | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with all options | cpu | unimplemented op(s): lstm_cell
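
lstmCell is the single-step form of lstm: it takes the previous hidden and cell state directly and returns the updated pair. A one-line sketch under the same WebNN assumptions as above, with x1, w1, r1, h0 and c0 as hypothetical operands (the cell variants drop the direction dimension, so weights are [4 * hiddenSize, inputSize] and states [batchSize, hiddenSize]):

    // lstmCell(input, weight, recurrentWeight, hiddenState, cellState, hiddenSize, options)
    const [h1, c1] = builder.lstmCell(x1, w1, r1, h0, c0, hiddenSize, {layout: 'iofg'});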

round_even.https.any.js

0 passed 10 skipped
Skipped (10)
Test | Variant | Reason
roundEven float32 positive 0D scalar | cpu | unimplemented op(s): round_even
roundEven float32 negative 1D tensor | cpu | unimplemented op(s): round_even
roundEven float32 2D tensor | cpu | unimplemented op(s): round_even
roundEven float32 3D tensor | cpu | unimplemented op(s): round_even
roundEven float32 4D tensor | cpu | unimplemented op(s): round_even
roundEven float16 positive 0D scalar | cpu | unimplemented op(s): round_even
roundEven float16 negative 1D tensor | cpu | unimplemented op(s): round_even
roundEven float16 2D tensor | cpu | unimplemented op(s): round_even
roundEven float16 3D tensor | cpu | unimplemented op(s): round_even
roundEven float16 4D tensor | cpu | unimplemented op(s): round_even
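
roundEven rounds each element to the nearest integer and breaks exact halfway ties toward the even neighbor ("banker's rounding"), which keeps .5 cases from biasing accumulated sums. A hypothetical scalar reference for the semantics:

    // Round half to even: 2.5 -> 2, 3.5 -> 4, -2.5 -> -2.
    function roundEven(x) {
      const f = Math.floor(x);
      const diff = x - f;
      if (diff > 0.5) return f + 1;
      if (diff < 0.5) return f;
      return f % 2 === 0 ? f : f + 1; // exact halfway: take the even neighbor
    }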

gru_cell.https.any.js

0 passed 8 skipped
Skipped (8)
Test | Variant | Reason
gruCell float32 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru_cell
gruCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru_cell
gruCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and and options.layout='rzn' | cpu | unimplemented op(s): gru_cell
gruCell float32 tensors with all options | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and and options.layout='rzn' | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with all options | cpu | unimplemented op(s): gru_cell
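
gruCell is likewise the single-step form of gru and, having no cell state, returns just the new hidden state. The same hypothetical operands as in the lstmCell sketch, but with 3 * hiddenSize weight rows:

    // gruCell(input, weight, recurrentWeight, hiddenState, hiddenSize, options)
    const h1 = builder.gruCell(x1, w1, r1, h0, hiddenSize, {resetAfter: true, layout: 'zrn'});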

cumulative_sum.https.any.js

0 passed 7 skipped
Skipped (7)
Test | Variant | Reason
cumulativeSum with float32 input and default options. | cpu | unimplemented op(s): cumulative_sum
cumulativeSum with float32 input and set exclusive to true. | cpu | unimplemented op(s): cumulative_sum
cumulativeSum with float32 input and set reversed to true. | cpu | unimplemented op(s): cumulative_sum
cumulativeSum with float16 input and default options. | cpu | unimplemented op(s): cumulative_sum
cumulativeSum with float16 input and set exclusive to true. | cpu | unimplemented op(s): cumulative_sum
cumulativeSum with float16 input and set reversed to true. | cpu | unimplemented op(s): cumulative_sum
cumulativeSum with int32 input and axis = 2. | cpu | unimplemented op(s): cumulative_sum
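
The exclusive and reversed options named in these variants are the op's two accumulation switches. A hypothetical 1-D reference for the semantics (the real op walks a chosen axis of an N-D tensor):

    function cumulativeSum(a, {exclusive = false, reversed = false} = {}) {
      const src = reversed ? [...a].reverse() : [...a];
      const out = [];
      let acc = 0;
      for (const v of src) {
        if (exclusive) { out.push(acc); acc += v; } // sum of strictly-preceding elements
        else           { acc += v; out.push(acc); } // running total including current
      }
      return reversed ? out.reverse() : out;
    }
    // cumulativeSum([1, 2, 3])                    -> [1, 3, 6]
    // cumulativeSum([1, 2, 3], {exclusive: true}) -> [0, 1, 3]
    // cumulativeSum([1, 2, 3], {reversed: true})  -> [6, 5, 3]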

byob_readtensor.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in byob_readtensor.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in byob_readtensor.https.any.js
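
The <name> in these messages appears to be a literal placeholder: the harness evidently looks for a declarative per-file test array (e.g. an absTests array in abs.https.any.js) and skips a file wholesale when none is found, so files that drive the API imperatively are recorded as a single file-level skip rather than a failure. The same applies to the parse-error entries below.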

constant-reshape-optimization.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in constant-reshape-optimization.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in constant-reshape-optimization.https.any.js

dequantizeLinear.https.any.js

32 passed 1 skipped
Skipped (1)
Test | Variant | Reason
[unnamed-15] | cpu | invalid extracted test case

graph_devices.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in graph_devices.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in graph_devices.https.any.js

inputs-are-not-modified.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in inputs-are-not-modified.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in inputs-are-not-modified.https.any.js

inputs-with-special-names.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in inputs-with-special-names.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in inputs-with-special-names.https.any.js

operations-with-special-names.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in operations-with-special-names.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in operations-with-special-names.https.any.js

parallel-dispatch.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in parallel-dispatch.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in parallel-dispatch.https.any.js

scalars.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in scalars.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in scalars.https.any.js

shared_arraybuffer_constant.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in shared_arraybuffer_constant.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in shared_arraybuffer_constant.https.any.js

tensor.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in tensor.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in tensor.https.any.js

abs.https.any.js

20 passed

add.https.any.js

23 passed

cast.https.any.js

49 passed

ceil.https.any.js

14 passed

clamp.https.any.js

51 passed

cos.https.any.js

14 passed

div.https.any.js

20 passed

elu.https.any.js

20 passed

equal.https.any.js

24 passed

erf.https.any.js

14 passed

exp.https.any.js

14 passed

floor.https.any.js

14 passed

gelu.https.any.js

13 passed

gemm.https.any.js

46 passed

log.https.any.js

14 passed

max.https.any.js

20 passed

min.https.any.js

20 passed

mul.https.any.js

20 passed

neg.https.any.js

19 passed

pad.https.any.js

25 passed

pow.https.any.js

20 passed

prelu.https.any.js

27 passed

relu.https.any.js

17 passed

sin.https.any.js

14 passed

slice.https.any.js

19 passed

split.https.any.js

20 passed

sqrt.https.any.js

14 passed

sub.https.any.js

26 passed

tan.https.any.js

14 passed

tanh.https.any.js

12 passed

where.https.any.js

34 passed