RustNN WPT Conformance

Date: Thu, 05 Mar 2026 07:47:53 GMT | Duration 27.34s

RustNN commit: 4393d1cf1bc9

Total
2346
Pass
2236
Fail
2
Skip
108
Pass Rate
95.31%
Files
108

relu.https.any.js

15 passed 2 failed
Failures (2)
Test | Variant | Error
relu int32 4D tensor | cpu | value mismatch for reluOutput[2]: expected 2147483646, got -2147483648, absDiff=4294967294, ulp=2650800128, ulpTol=0
relu int64 4D tensor | cpu | value mismatch for reluOutput[4]: expected 9223372036854775807, got -9223372036854775808

l2Pool2d.https.any.js

0 passed 28 skipped
Skipped (28)
Test | Variant | Reason
l2Pool2d float32 4D constant tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor all negative default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.windowDimensions | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.padding | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.strides | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.dilations | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.layout=nchw | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.layout=nhwc | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.outputSizes ignores options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.outputSizes ignores options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float32 4D tensor options.dilations with options.strides | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D constant tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor all positive default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor all negative default options | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.windowDimensions | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.padding | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.strides | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.dilations | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.layout=nchw | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.layout=nhwc | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.outputSizes ignores options.roundingType=floor | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.outputSizes ignores options.roundingType=ceil | cpu | unimplemented op(s): l2_pool2d
l2Pool2d float16 4D tensor options.dilations with options.strides | cpu | unimplemented op(s): l2_pool2d

lstm.https.any.js

0 passed 28 skipped
Skipped (28)
Test | Variant | Reason
lstm float32 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialCellState | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=1 with all options | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2, batchSize=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2 with all options | cpu | unimplemented op(s): lstm
lstm float32 tensors steps=2 with bidirections | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.initialCellState | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=1 with all options | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=2 with all options | cpu | unimplemented op(s): lstm
lstm float16 tensors steps=2 with bidirections | cpu | unimplemented op(s): lstm

gru.https.any.js

0 passed 24 skipped
Skipped (24)
Test | Variant | Reason
gru float32 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] and reset_after=true | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.layout='rzn' | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): gru
gru float32 tensors steps=1 all options | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with options.bias, options.recurrentBias, options.direction='both' and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float32 tensors steps=2 with all options | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] and resetAfter=true | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.direction='forward' | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.layout='rzn' | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.initialHiddenState | cpu | unimplemented op(s): gru
gru float16 tensors steps=1 all options | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and options.direction='backward' | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and explicit options.returnSequence=false | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.direction='backward', options.activations=['relu', 'relu'] and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with options.bias, options.recurrentBias, options.direction='both' and options.returnSequence=true | cpu | unimplemented op(s): gru
gru float16 tensors steps=2 with all options | cpu | unimplemented op(s): gru

lstm_cell.https.any.js

0 passed 10 skipped
Skipped (10)
Test | Variant | Reason
lstmCell float32 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm_cell
lstmCell float32 tensors with all options | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu', 'relu'] | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.peepholeWeight | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and explicit options.layout='iofg' | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu', 'relu'] and options.layout='ifgo' | cpu | unimplemented op(s): lstm_cell
lstmCell float16 tensors with all options | cpu | unimplemented op(s): lstm_cell

gru_cell.https.any.js

0 passed 8 skipped
Skipped (8)
Test | Variant | Reason
gruCell float32 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru_cell
gruCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru_cell
gruCell float32 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and and options.layout='rzn' | cpu | unimplemented op(s): gru_cell
gruCell float32 tensors with all options | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with options.bias, options.recurrentBias and options.activations=['relu', 'relu'] | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and explicit options.layout='zrn' | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with options.bias, options.recurrentBias, options.activations=['relu', 'relu'] and and options.layout='rzn' | cpu | unimplemented op(s): gru_cell
gruCell float16 tensors with all options | cpu | unimplemented op(s): gru_cell

byob_readtensor.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in byob_readtensor.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in byob_readtensor.https.any.js

constant-reshape-optimization.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in constant-reshape-optimization.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in constant-reshape-optimization.https.any.js

graph_devices.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in graph_devices.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in graph_devices.https.any.js

inputs-are-not-modified.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in inputs-are-not-modified.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in inputs-are-not-modified.https.any.js

inputs-with-special-names.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in inputs-with-special-names.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in inputs-with-special-names.https.any.js

operations-with-special-names.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in operations-with-special-names.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in operations-with-special-names.https.any.js

parallel-dispatch.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in parallel-dispatch.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in parallel-dispatch.https.any.js

scalars.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in scalars.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in scalars.https.any.js

shared_arraybuffer_constant.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in shared_arraybuffer_constant.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in shared_arraybuffer_constant.https.any.js

tensor.https.any.js

0 passed 1 skipped

File parse error: No <name>Tests array found in tensor.https.any.js

Skipped (1)
Test | Variant | Reason
<file> | cpu | No <name>Tests array found in tensor.https.any.js

abs.https.any.js

20 passed

add.https.any.js

23 passed

cast.https.any.js

49 passed

ceil.https.any.js

14 passed

clamp.https.any.js

51 passed

cos.https.any.js

14 passed

div.https.any.js

20 passed

elu.https.any.js

20 passed

equal.https.any.js

24 passed

erf.https.any.js

14 passed

exp.https.any.js

14 passed

floor.https.any.js

14 passed

gelu.https.any.js

13 passed

gemm.https.any.js

46 passed

log.https.any.js

14 passed

max.https.any.js

20 passed

min.https.any.js

20 passed

mul.https.any.js

20 passed

neg.https.any.js

19 passed

pad.https.any.js

25 passed

pow.https.any.js

20 passed

prelu.https.any.js

27 passed

sin.https.any.js

14 passed

slice.https.any.js

19 passed

split.https.any.js

20 passed

sqrt.https.any.js

14 passed

sub.https.any.js

26 passed

tan.https.any.js

14 passed

tanh.https.any.js

12 passed

where.https.any.js

34 passed