ONNX Backends for Shape Inference
Backend class: OnnxInferenceBackendShape
<<<
import unittest
import sys
from datetime import datetime
from contextlib import redirect_stdout, redirect_stderr
from io import StringIO
from onnx.backend.test import BackendTest
from onnx import __version__ as onnx_version
from onnxruntime import __version__ as ort_version
from numpy import __version__ as npy_version
import mlprodict.onnxrt.backend_shape as backend

# Build the standard ONNX backend-conformance suite against the
# shape-inference backend.  Only CPU tests are kept; the pretrained
# model tests (densenet, resnet50, ...) are excluded because they
# download large models and are irrelevant for shape inference.
back_test = BackendTest(backend, __name__)
back_test.include('.*_cpu')
back_test.exclude('.*_blvc_.*')
back_test.exclude('.*_densenet_.*')
back_test.exclude('.*_densenet121_.*')
back_test.exclude('.*_inception_.*')
back_test.exclude('.*_resnet50_.*')
back_test.exclude('.*_shufflenet_.*')
back_test.exclude('.*_squeezenet_.*')
back_test.exclude('.*_vgg19_.*')
back_test.exclude('.*_zfnet512_.*')
# unittest.main discovers the generated test cases through the
# module globals, so they must be injected here.
globals().update(back_test.enable_report().test_cases)
print('---------------------------------')
print('python', sys.version)
print('onnx', onnx_version)
print('onnxruntime', ort_version)
print('numpy', npy_version)
print('---------------------------------')
print(datetime.now(), "BEGIN")
print('---------------------------------')
# Capture the verbose per-test log (stdout and stderr) so the noisy
# "no matched include pattern" skip lines can be filtered out before
# the log is displayed.  (The former `if True:` toggle with an
# unreachable duplicate call was removed.)
buffer = StringIO()
with redirect_stdout(buffer):
    with redirect_stderr(buffer):
        res = unittest.main(verbosity=2, exit=False)
testsRun = res.result.testsRun
errors = len(res.result.errors)
skipped = len(res.result.skipped)
unexpectedSuccesses = len(res.result.unexpectedSuccesses)
expectedFailures = len(res.result.expectedFailures)
print('---------------------------------')
print(datetime.now(), "END")
print('---------------------------------')
print("testsRun=%d errors=%d skipped=%d" % (testsRun, errors, skipped))
print("unexpectedSuccesses=%d expectedFailures=%d" % (
    unexpectedSuccesses, expectedFailures))
# Success ratio over the tests that actually ran; guard against a
# ZeroDivisionError when every collected test was skipped.
ran = testsRun - skipped
ratio = (1 - errors * 1.0 / ran) if ran else 0.0
print("ratio=%f" % ratio)
print('---------------------------------')
lines = buffer.getvalue().split('\n')
print("\n".join(line for line in lines
                if "skipped 'no matched include pattern'" not in line))
>>>
---------------------------------
python 3.9.1 (default, Jan 18 2021, 16:35:58)
[GCC 8.3.0]
onnx 1.11.0
onnxruntime 1.11.0
numpy 1.21.5
---------------------------------
2022-04-05 07:16:00.223218 BEGIN
---------------------------------
---------------------------------
2022-04-05 07:16:10.971090 END
---------------------------------
testsRun=2026 errors=781 skipped=1021
unexpectedSuccesses=0 expectedFailures=0
ratio=0.222886
---------------------------------
test_abs_cpu (__main__.OnnxBackendNodeModelTest) ... /var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/npy/xop.py:16: DeprecationWarning: Please use `coo_matrix` from the `scipy.sparse` namespace, the `scipy.sparse.coo` namespace is deprecated.
from scipy.sparse.coo import coo_matrix
/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op_numpy_helper.py:8: DeprecationWarning: Please use `coo_matrix` from the `scipy.sparse` namespace, the `scipy.sparse.coo` namespace is deprecated.
from scipy.sparse.coo import coo_matrix
/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py:188: DeprecationWarning: `np.object` is a deprecated alias for the builtin `object`. To silence this warning, use `object` by itself. Doing this will not modify any behavior and is safe.
Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations
if ref_outputs[i].dtype == np.object:
ok
test_acos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_acos_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_acosh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_acosh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_adagrad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_adagrad_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_adam_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_adam_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_add_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_add_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_add_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_and2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_and3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_and4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_and_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_and_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_and_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_and_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_and_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_argmax_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmax_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_argmin_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_asin_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_asin_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_asinh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_asinh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_atan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_atan_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_atanh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_atanh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_averagepool_1d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_precomputed_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_averagepool_3d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_basic_conv_with_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_basic_conv_without_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_basic_convinteger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_batchnorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_batchnorm_epsilon_training_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_batchnorm_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_batchnorm_example_training_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bernoulli_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bernoulli_double_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bernoulli_double_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bernoulli_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bernoulli_seed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bernoulli_seed_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_left_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_left_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_left_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_left_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_right_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_right_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_right_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_bitshift_right_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cast_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_BFLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_DOUBLE_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_DOUBLE_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_FLOAT16_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_FLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT_to_BFLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_FLOAT_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_FLOAT_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_castlike_FLOAT_to_STRING_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_castlike_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_castlike_STRING_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_ceil_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_celu_cpu (__main__.OnnxBackendNodeModelTest) ... /var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_celu.py:47: DeprecationWarning: `np.float` is a deprecated alias for the builtin `float`. To silence this warning, use `float` by itself. Doing this will not modify any behavior and is safe. If you specifically wanted the numpy scalar type, use `np.float64` here.
Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations
lambda x: pycelu(x, self.alpha), otypes=[numpy.float])
ok
test_celu_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_clip_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_default_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_default_int8_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_default_int8_max_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_default_int8_min_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_default_max_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_default_min_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_outbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_clip_splitbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_compress_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_compress_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_compress_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_compress_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_1d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_1d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_3d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_3d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_3d_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_3d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_3d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_concat_3d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_constant_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_constant_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_constantofshape_float_ones_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_constantofshape_int_shape_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_constantofshape_int_zeros_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_conv_with_autopad_same_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_conv_with_strides_and_asymmetric_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_conv_with_strides_no_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_conv_with_strides_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convinteger_with_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convinteger_without_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_autopad_same_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_kernel_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_convtranspose_with_kernel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_cos_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_cosh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_cosh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_cumsum_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cumsum_1d_exclusive_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cumsum_1d_reverse_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cumsum_1d_reverse_exclusive_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cumsum_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cumsum_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_cumsum_2d_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_depthtospace_crd_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_depthtospace_crd_mode_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_depthtospace_dcr_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_depthtospace_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dequantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dequantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_det_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_det_nd_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_div_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_div_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_div_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_div_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_dropout_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dropout_default_mask_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dropout_default_old_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dropout_default_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dropout_random_old_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dynamicquantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dynamicquantizelinear_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dynamicquantizelinear_max_adjusted_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dynamicquantizelinear_max_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dynamicquantizelinear_min_adjusted_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_dynamicquantizelinear_min_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_edge_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_einsum_batch_diagonal_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_einsum_batch_matmul_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_einsum_inner_prod_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_einsum_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_einsum_transpose_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_elu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_elu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_elu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_erf_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_exp_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_exp_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_expand_dim_changed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_expand_dim_unchanged_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_eyelike_populate_off_main_diagonal_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_eyelike_with_dtype_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_eyelike_without_dtype_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_negative_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_negative_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_negative_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_flatten_negative_axis4_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_floor_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_floor_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_gather_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gather_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gather_2d_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gather_elements_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gather_elements_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gather_elements_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gather_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gathernd_example_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gathernd_example_int32_batch_dim1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gathernd_example_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_all_attributes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_alpha_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_beta_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_default_matrix_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_default_no_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_default_scalar_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_default_single_elem_vector_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_default_vector_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_default_zero_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_transposeA_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gemm_transposeB_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_globalaveragepool_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_globalaveragepool_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_globalmaxpool_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_globalmaxpool_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_greater_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_greater_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_greater_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_greater_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_greater_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_greater_equal_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_gridsample_aligncorners_true_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_bicubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_bilinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_border_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_reflection_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gridsample_zeros_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gru_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gru_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gru_seq_length_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_gru_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_hardmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardmax_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardmax_one_hot_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardsigmoid_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardsigmoid_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardsigmoid_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardswish_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_hardswish_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_identity_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_identity_opt_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_identity_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_if_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_if_opt_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_if_seq_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_instancenorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_instancenorm_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_isinf_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_isinf_negative_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_isinf_positive_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_isnan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_leakyrelu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_leakyrelu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_leakyrelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_less_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_less_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_less_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_less_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_less_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_less_equal_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_log_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_log_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_logsoftmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_logsoftmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_logsoftmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_logsoftmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_logsoftmax_example_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_example_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_logsoftmax_large_number_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_logsoftmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_logsoftmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_loop11_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_loop13_seq_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_loop16_seq_none_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_lrn_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_lrn_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_lstm_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_lstm_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_lstm_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_lstm_with_peepholes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_matmul_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_matmul_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_matmul_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_matmulinteger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_max_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_max_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_max_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_maxpool_1d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_2d_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_3d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_with_argmax_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxpool_with_argmax_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxunpool_export_with_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_maxunpool_export_without_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_mean_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_mean_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_mean_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_min_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_min_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_min_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_broadcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_int64_fmod_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_mod_mixed_sign_float16_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_mod_mixed_sign_float32_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_mod_mixed_sign_float64_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_mod_mixed_sign_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_mixed_sign_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_mixed_sign_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_mixed_sign_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mod_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_momentum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_momentum_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_mul_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_mul_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mul_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mul_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_mvn_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_mvn_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_neg_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_nesterov_momentum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NC_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NC_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_no_weight_reduction_mean_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_reduction_sum_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_center_point_box_format_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_flipped_coordinates_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_identical_boxes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_limit_output_size_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_single_box_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_suppress_by_IOU_and_scores_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_suppress_by_IOU_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_two_batches_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonmaxsuppression_two_classes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_nonzero_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_not_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_not_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_not_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_onehot_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_onehot_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_onehot_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_onehot_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_optional_get_element_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_optional_get_element_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_optional_has_element_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_optional_has_element_empty_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_or2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_or3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_or4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_or_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_or_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_or_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_or_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_or_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_pow_bcast_array_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_bcast_scalar_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_pow_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_pow_types_float32_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_float32_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_float32_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_float32_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_float_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_int32_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_int32_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_pow_types_int64_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_pow_types_int64_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_pow_types_int_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_prelu_broadcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_prelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_qlinearconv_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_qlinearmatmul_2D_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_qlinearmatmul_3D_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_quantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_quantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_range_float_type_positive_delta_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_range_float_type_positive_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_range_int32_type_negative_delta_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_range_int32_type_negative_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reciprocal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_reciprocal_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_reduce_l1_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l1_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_l2_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_asc_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_desc_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_log_sum_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_default_axes_keepdim_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_max_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_mean_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_min_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_prod_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_empty_axes_input_noop_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_empty_axes_input_noop_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reduce_sum_square_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reflect_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_relu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_reshape_allowzero_reordered_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_extended_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_negative_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_negative_extended_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_one_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_reduced_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_reordered_all_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_reordered_last_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_zero_and_negative_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reshape_zero_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_sizes_linear_pytorch_half_pixel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_downsample_sizes_nearest_tf_half_pixel_for_nn_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_tf_crop_and_resize_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_cubic_asymmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_sizes_nearest_ceil_half_pixel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_sizes_nearest_floor_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reversesequence_batch_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_reversesequence_time_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_rnn_seq_length_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_roialign_aligned_false_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_roialign_aligned_true_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_round_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_scan9_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scan_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatter_elements_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatter_elements_with_duplicate_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatter_elements_with_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatter_elements_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatter_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatter_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatternd_add_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatternd_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_scatternd_multiply_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1_mean_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_no_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_weights_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_weights_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_weights_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_none_weights_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_sum_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sce_sum_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_selu_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_selu_default_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_selu_example_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_sequence_insert_at_back_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sequence_insert_at_front_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_clip_end_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_clip_start_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_end_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_start_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_start_1_end_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_start_1_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shape_start_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shrink_hard_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_shrink_soft_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sigmoid_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sigmoid_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sign_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_simple_rnn_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_simple_rnn_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_simple_rnn_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sin_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sin_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sinh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sinh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_size_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_size_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_default_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_default_steps_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_end_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_neg_steps_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_slice_start_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_softmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_softmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_softmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
test_softmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_softmax_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_large_number_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_softmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_softmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softplus_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softplus_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softsign_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_softsign_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_spacetodepth_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_spacetodepth_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_equal_parts_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_equal_parts_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_equal_parts_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_variable_parts_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_variable_parts_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_variable_parts_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_split_zero_size_splits_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sqrt_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sqrt_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_squeeze_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_squeeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_strnormalizer_export_monday_casesensintive_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_strnormalizer_export_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_strnormalizer_export_monday_casesensintive_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_strnormalizer_export_monday_empty_output_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_strnormalizer_export_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_strnormalizer_nostopwords_nochangecase_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sub_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sub_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sub_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sub_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_sum_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sum_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_sum_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tan_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tanh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tanh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tfidfvectorizer_tf_batch_onlybigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tfidfvectorizer_tf_batch_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tfidfvectorizer_tf_batch_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tfidfvectorizer_tf_only_bigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tfidfvectorizer_tf_onlybigrams_levelempty_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tfidfvectorizer_tf_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tfidfvectorizer_tf_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_thresholdedrelu_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_thresholdedrelu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_thresholdedrelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tile_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tile_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_top_k_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_top_k_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_top_k_smallest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_training_dropout_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_training_dropout_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_training_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_training_dropout_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_training_dropout_zero_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_training_dropout_zero_ratio_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_all_permutations_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_all_permutations_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_all_permutations_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_all_permutations_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_all_permutations_4_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_all_permutations_5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_transpose_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_tril_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_one_row_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_out_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_out_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_square_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_square_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_tril_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_triu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_one_row_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_out_neg_out_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_out_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_square_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_square_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_triu_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unique_not_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unique_sorted_with_axis_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unique_sorted_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unique_sorted_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unique_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_axis_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_three_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_two_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_unsqueeze_unsorted_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_upsample_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_where_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_where_long_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_xor2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_xor3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_xor4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_xor_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_xor_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_xor_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_xor_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
test_xor_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
test_AvgPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_AvgPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_AvgPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_AvgPool2d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_AvgPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_AvgPool3d_stride1_pad0_gpu_input_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_AvgPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_BatchNorm1d_3d_input_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_BatchNorm2d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_BatchNorm2d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_BatchNorm3d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_BatchNorm3d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_ConstantPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_pad1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_pad1size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_pad2_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_pad2size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_depthwise_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_depthwise_padded_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_depthwise_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_depthwise_with_multiplier_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_groups_thnn_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv2d_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_dilated_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Conv3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_ConvTranspose2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_ConvTranspose2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_ELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_Embedding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Embedding_sparse_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_GLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_GLU_dim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_LeakyReLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_LeakyReLU_with_negval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_Linear_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Linear_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_LogSoftmax_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_MaxPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool1d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool2d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_MaxPool3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PReLU_1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PReLU_1d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PReLU_2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PReLU_2d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PReLU_3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PReLU_3d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PixelShuffle_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_PoissonNLLLLoss_no_reduce_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_ReLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_ReflectionPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_ReplicationPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_SELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... FAIL
test_Sigmoid_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_Softmax_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_Softmin_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_Softplus_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Softsign_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_Tanh_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_ZeroPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
test_log_softmax_dim3_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_log_softmax_lastdim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_softmax_functional_dim3_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_softmax_lastdim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
test_operator_add_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_add_size1_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_add_size1_right_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_add_size1_singleton_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_addconstant_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_addmm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_basic_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_chunk_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_clip_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_concat2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_conv_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_convtranspose_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_exp_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_flatten_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_index_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_max_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_maxpool_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_min_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
test_operator_mm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_non_float_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_pad_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_permute2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_pow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... /var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_pow.py:19: RuntimeWarning: invalid value encountered in power
return (numpy.power(a, b).astype(a.dtype), )
ok
test_operator_reduced_mean_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_reduced_mean_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_reduced_sum_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_reduced_sum_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_repeat_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_repeat_dim_overflow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_selu_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... FAIL
test_operator_sqrt_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... /var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_sqrt.py:22: RuntimeWarning: invalid value encountered in sqrt
return (numpy.sqrt(x), )
ok
test_operator_symbolic_override_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_symbolic_override_nested_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_operator_view_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
test_bvlc_alexnet_cpu (__main__.OnnxBackendRealModelTest) ... ERROR
test_densenet121_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_densenet121_.*"'
test_densenet121_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_densenet121_.*"'
test_inception_v1_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
test_inception_v1_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
test_inception_v2_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
test_inception_v2_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
test_resnet50_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_resnet50_.*"'
test_resnet50_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_resnet50_.*"'
test_shufflenet_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_shufflenet_.*"'
test_shufflenet_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_shufflenet_.*"'
test_squeezenet_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_squeezenet_.*"'
test_squeezenet_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_squeezenet_.*"'
test_vgg19_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_vgg19_.*"'
test_vgg19_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_vgg19_.*"'
test_zfnet512_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_zfnet512_.*"'
test_zfnet512_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_zfnet512_.*"'
test_expand_shape_model1_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_expand_shape_model2_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_expand_shape_model3_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_expand_shape_model4_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_gradient_of_add_and_mul_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_gradient_of_add_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model1_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model2_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model3_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model4_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model5_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model6_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model7_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sequence_model8_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_shrink_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_sign_model_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
test_single_relu_model_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
test_strnorm_model_monday_casesensintive_lower_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_strnorm_model_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_strnorm_model_monday_casesensintive_upper_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_strnorm_model_monday_empty_output_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_strnorm_model_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
test_strnorm_model_nostopwords_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
======================================================================
ERROR: test_adagrad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Adagrad' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_adagrad_multiple_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Adagrad' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_adam_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Adam' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_adam_multiple_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Adam' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_add_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 37, in shape_add
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_and_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 42, in shape_and
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('bool')) and ShapeResult('y', [5], dtype('bool')).
======================================================================
ERROR: test_and_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 42, in shape_and
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('bool')) and ShapeResult('y', [4, 5], dtype('bool')).
======================================================================
ERROR: test_and_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 42, in shape_and
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5, 6], dtype('bool')) and ShapeResult('y', [5, 6], dtype('bool')).
======================================================================
ERROR: test_and_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 42, in shape_and
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5, 6], dtype('bool')) and ShapeResult('y', [4, 5, 6], dtype('bool')).
======================================================================
ERROR: test_argmax_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmax_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_argmin_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_1d_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_precomputed_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_averagepool_3d_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_basic_conv_with_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_basic_conv_without_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_basic_convinteger_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ConvInteger' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_batchnorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_batchnorm_epsilon_training_mode_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_batchnorm_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_batchnorm_example_training_mode_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bernoulli_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Bernoulli' domain='', looking for 'shape_bernoulli' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bernoulli_double_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Bernoulli' domain='', looking for 'shape_bernoulli' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bernoulli_double_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'RandomUniformLike' domain='', looking for 'shape_randomuniformlike' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bernoulli_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'RandomUniformLike' domain='', looking for 'shape_randomuniformlike' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bernoulli_seed_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Bernoulli' domain='', looking for 'shape_bernoulli' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bernoulli_seed_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'RandomUniformLike' domain='', looking for 'shape_randomuniformlike' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_left_uint16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_left_uint32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_left_uint64_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_left_uint8_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_right_uint16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_right_uint32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_right_uint64_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bitshift_right_uint8_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 362, in _var_as_dict
elem_type = _elem_type_as_str(t.elem_type)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 292, in _elem_type_as_str
raise NotImplementedError( # pragma: no cover
NotImplementedError: elem_type '16' is unknown
fields:
['__abs__',
'__add__',
'__and__',
'__bool__',
'__ceil__',
'__class__',
'__delattr__',
'__dir__',
'__divmod__',
'__doc__',
'__eq__',
'__float__',
'__floor__',
'__floordiv__',
'__format__',
'__ge__',
'__getattribute__',
'__getnewargs__',
'__gt__',
'__hash__',
'__index__',
'__init__',
'__init_subclass__',
'__int__',
'__invert__',
'__le__',
'__lshift__',
'__lt__',
'__mod__',
'__mul__',
'__ne__',
'__neg__',
'__new__',
'__or__',
'__pos__',
'__pow__',
'__radd__',
'__rand__',
'__rdivmod__',
'__reduce__',
'__reduce_ex__',
'__repr__',
'__rfloordiv__',
'__rlshift__',
'__rmod__',
'__rmul__',
'__ror__',
'__round__',
'__rpow__',
'__rrshift__',
'__rshift__',
'__rsub__',
'__rtruediv__',
'__rxor__',
'__setattr__',
'__sizeof__',
'__str__',
'__sub__',
'__subclasshook__',
'__truediv__',
'__trunc__',
'__xor__',
'as_integer_ratio',
'bit_length',
'conjugate',
'denominator',
'from_bytes',
'imag',
'numerator',
'real',
'to_bytes']
-----
<class 'int'>.
======================================================================
ERROR: test_cast_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 605, in to_sequence
outputs[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 362, in _var_as_dict
elem_type = _elem_type_as_str(t.elem_type)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 292, in _elem_type_as_str
raise NotImplementedError( # pragma: no cover
NotImplementedError: elem_type '16' is unknown
fields:
['__abs__',
'__add__',
'__and__',
'__bool__',
'__ceil__',
'__class__',
'__delattr__',
'__dir__',
'__divmod__',
'__doc__',
'__eq__',
'__float__',
'__floor__',
'__floordiv__',
'__format__',
'__ge__',
'__getattribute__',
'__getnewargs__',
'__gt__',
'__hash__',
'__index__',
'__init__',
'__init_subclass__',
'__int__',
'__invert__',
'__le__',
'__lshift__',
'__lt__',
'__mod__',
'__mul__',
'__ne__',
'__neg__',
'__new__',
'__or__',
'__pos__',
'__pow__',
'__radd__',
'__rand__',
'__rdivmod__',
'__reduce__',
'__reduce_ex__',
'__repr__',
'__rfloordiv__',
'__rlshift__',
'__rmod__',
'__rmul__',
'__ror__',
'__round__',
'__rpow__',
'__rrshift__',
'__rshift__',
'__rsub__',
'__rtruediv__',
'__rxor__',
'__setattr__',
'__sizeof__',
'__str__',
'__sub__',
'__subclasshook__',
'__truediv__',
'__trunc__',
'__xor__',
'as_integer_ratio',
'bit_length',
'conjugate',
'denominator',
'from_bytes',
'imag',
'numerator',
'real',
'to_bytes']
-----
<class 'int'>.
======================================================================
ERROR: test_cast_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cast_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 362, in _var_as_dict
elem_type = _elem_type_as_str(t.elem_type)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 292, in _elem_type_as_str
raise NotImplementedError( # pragma: no cover
NotImplementedError: elem_type '16' is unknown
fields:
['__abs__',
'__add__',
'__and__',
'__bool__',
'__ceil__',
'__class__',
'__delattr__',
'__dir__',
'__divmod__',
'__doc__',
'__eq__',
'__float__',
'__floor__',
'__floordiv__',
'__format__',
'__ge__',
'__getattribute__',
'__getnewargs__',
'__gt__',
'__hash__',
'__index__',
'__init__',
'__init_subclass__',
'__int__',
'__invert__',
'__le__',
'__lshift__',
'__lt__',
'__mod__',
'__mul__',
'__ne__',
'__neg__',
'__new__',
'__or__',
'__pos__',
'__pow__',
'__radd__',
'__rand__',
'__rdivmod__',
'__reduce__',
'__reduce_ex__',
'__repr__',
'__rfloordiv__',
'__rlshift__',
'__rmod__',
'__rmul__',
'__ror__',
'__round__',
'__rpow__',
'__rrshift__',
'__rshift__',
'__rsub__',
'__rtruediv__',
'__rxor__',
'__setattr__',
'__sizeof__',
'__str__',
'__sub__',
'__subclasshook__',
'__truediv__',
'__trunc__',
'__xor__',
'as_integer_ratio',
'bit_length',
'conjugate',
'denominator',
'from_bytes',
'imag',
'numerator',
'real',
'to_bytes']
-----
<class 'int'>.
======================================================================
ERROR: test_castlike_BFLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 362, in _var_as_dict
elem_type = _elem_type_as_str(t.elem_type)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 292, in _elem_type_as_str
raise NotImplementedError( # pragma: no cover
NotImplementedError: elem_type '16' is unknown
fields:
['__abs__',
'__add__',
'__and__',
'__bool__',
'__ceil__',
'__class__',
'__delattr__',
'__dir__',
'__divmod__',
'__doc__',
'__eq__',
'__float__',
'__floor__',
'__floordiv__',
'__format__',
'__ge__',
'__getattribute__',
'__getnewargs__',
'__gt__',
'__hash__',
'__index__',
'__init__',
'__init_subclass__',
'__int__',
'__invert__',
'__le__',
'__lshift__',
'__lt__',
'__mod__',
'__mul__',
'__ne__',
'__neg__',
'__new__',
'__or__',
'__pos__',
'__pow__',
'__radd__',
'__rand__',
'__rdivmod__',
'__reduce__',
'__reduce_ex__',
'__repr__',
'__rfloordiv__',
'__rlshift__',
'__rmod__',
'__rmul__',
'__ror__',
'__round__',
'__rpow__',
'__rrshift__',
'__rshift__',
'__rsub__',
'__rtruediv__',
'__rxor__',
'__setattr__',
'__sizeof__',
'__str__',
'__sub__',
'__subclasshook__',
'__truediv__',
'__trunc__',
'__xor__',
'as_integer_ratio',
'bit_length',
'conjugate',
'denominator',
'from_bytes',
'imag',
'numerator',
'real',
'to_bytes']
-----
<class 'int'>.
======================================================================
ERROR: test_castlike_DOUBLE_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_DOUBLE_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_FLOAT16_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_FLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 362, in _var_as_dict
elem_type = _elem_type_as_str(t.elem_type)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 292, in _elem_type_as_str
raise NotImplementedError( # pragma: no cover
NotImplementedError: elem_type '16' is unknown
fields:
['__abs__',
'__add__',
'__and__',
'__bool__',
'__ceil__',
'__class__',
'__delattr__',
'__dir__',
'__divmod__',
'__doc__',
'__eq__',
'__float__',
'__floor__',
'__floordiv__',
'__format__',
'__ge__',
'__getattribute__',
'__getnewargs__',
'__gt__',
'__hash__',
'__index__',
'__init__',
'__init_subclass__',
'__int__',
'__invert__',
'__le__',
'__lshift__',
'__lt__',
'__mod__',
'__mul__',
'__ne__',
'__neg__',
'__new__',
'__or__',
'__pos__',
'__pow__',
'__radd__',
'__rand__',
'__rdivmod__',
'__reduce__',
'__reduce_ex__',
'__repr__',
'__rfloordiv__',
'__rlshift__',
'__rmod__',
'__rmul__',
'__ror__',
'__round__',
'__rpow__',
'__rrshift__',
'__rshift__',
'__rsub__',
'__rtruediv__',
'__rxor__',
'__setattr__',
'__sizeof__',
'__str__',
'__sub__',
'__subclasshook__',
'__truediv__',
'__trunc__',
'__xor__',
'as_integer_ratio',
'bit_length',
'conjugate',
'denominator',
'from_bytes',
'imag',
'numerator',
'real',
'to_bytes']
-----
<class 'int'>.
======================================================================
ERROR: test_castlike_FLOAT_to_BFLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 362, in _var_as_dict
elem_type = _elem_type_as_str(t.elem_type)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 292, in _elem_type_as_str
raise NotImplementedError( # pragma: no cover
NotImplementedError: elem_type '16' is unknown
fields:
['__abs__',
'__add__',
'__and__',
'__bool__',
'__ceil__',
'__class__',
'__delattr__',
'__dir__',
'__divmod__',
'__doc__',
'__eq__',
'__float__',
'__floor__',
'__floordiv__',
'__format__',
'__ge__',
'__getattribute__',
'__getnewargs__',
'__gt__',
'__hash__',
'__index__',
'__init__',
'__init_subclass__',
'__int__',
'__invert__',
'__le__',
'__lshift__',
'__lt__',
'__mod__',
'__mul__',
'__ne__',
'__neg__',
'__new__',
'__or__',
'__pos__',
'__pow__',
'__radd__',
'__rand__',
'__rdivmod__',
'__reduce__',
'__reduce_ex__',
'__repr__',
'__rfloordiv__',
'__rlshift__',
'__rmod__',
'__rmul__',
'__ror__',
'__round__',
'__rpow__',
'__rrshift__',
'__rshift__',
'__rsub__',
'__rtruediv__',
'__rxor__',
'__setattr__',
'__sizeof__',
'__str__',
'__sub__',
'__subclasshook__',
'__truediv__',
'__trunc__',
'__xor__',
'as_integer_ratio',
'bit_length',
'conjugate',
'denominator',
'from_bytes',
'imag',
'numerator',
'real',
'to_bytes']
-----
<class 'int'>.
======================================================================
ERROR: test_castlike_FLOAT_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_FLOAT_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_FLOAT_to_STRING_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_castlike_STRING_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_celu_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_compress_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_compress_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_compress_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_compress_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_1d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_1d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_3d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_3d_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_3d_axis_2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_3d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_3d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_concat_3d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_constant_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_constant_pad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_constantofshape_float_ones_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConstantOfShape' domain='', looking for 'shape_constantofshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_constantofshape_int_shape_zero_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConstantOfShape' domain='', looking for 'shape_constantofshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_constantofshape_int_zeros_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConstantOfShape' domain='', looking for 'shape_constantofshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_conv_with_autopad_same_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_conv_with_strides_and_asymmetric_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_conv_with_strides_no_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_conv_with_strides_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convinteger_with_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ConvInteger' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_convinteger_without_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ConvInteger' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_convtranspose_1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_autopad_same_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_dilations_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_kernel_shape_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_pad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_pads_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_convtranspose_with_kernel_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_1d_exclusive_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_1d_reverse_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_1d_reverse_exclusive_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_cumsum_2d_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_depthtospace_crd_mode_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'DepthToSpace' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_depthtospace_crd_mode_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'DepthToSpace' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_depthtospace_dcr_mode_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'DepthToSpace' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_depthtospace_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'DepthToSpace' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_dequantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'DequantizeLinear' domain='', looking for 'shape_dequantizelinear' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dequantizelinear_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'DequantizeLinear' domain='', looking for 'shape_dequantizelinear' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_det_2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 320, in run
outputs = list(prepared_model.run(inputs))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
outs = self._session.run(feeds)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 47, in run
raise RuntimeError(
RuntimeError: Incompatible shapes ShapeResult('y', [], dtype('float32')) and (1,) for output 'y'.
======================================================================
ERROR: test_div_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 47, in shape_div
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_dropout_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dropout_default_mask_ratio_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dropout_default_old_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dropout_default_ratio_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dropout_random_old_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dynamicquantizelinear_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 77, in shape_dispatch
sess = rt_class(onnx_schema.function_body)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dynamicquantizelinear_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dynamicquantizelinear_max_adjusted_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 77, in shape_dispatch
sess = rt_class(onnx_schema.function_body)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dynamicquantizelinear_max_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dynamicquantizelinear_min_adjusted_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 77, in shape_dispatch
sess = rt_class(onnx_schema.function_body)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_dynamicquantizelinear_min_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_edge_pad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_einsum_batch_diagonal_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_einsum_batch_matmul_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_einsum_inner_prod_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_einsum_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_einsum_transpose_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 52, in shape_equal
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('int32')) and ShapeResult('y', [5], dtype('int32')).
======================================================================
ERROR: test_expand_dim_changed_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_expand_dim_unchanged_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_eyelike_populate_off_main_diagonal_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'EyeLike' domain='', looking for 'shape_eyelike' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_eyelike_with_dtype_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'EyeLike' domain='', looking for 'shape_eyelike' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_eyelike_without_dtype_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'EyeLike' domain='', looking for 'shape_eyelike' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_axis0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_axis1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_axis2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_axis3_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_negative_axis1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_negative_axis2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_negative_axis3_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_flatten_negative_axis4_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_2d_indices_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_elements_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'GatherElements' domain='', looking for 'shape_gatherelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_elements_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'GatherElements' domain='', looking for 'shape_gatherelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_elements_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'GatherElements' domain='', looking for 'shape_gatherelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gather_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gathernd_example_float32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GatherND' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gathernd_example_int32_batch_dim1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GatherND' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gathernd_example_int32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GatherND' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gemm_all_attributes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_alpha_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_beta_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_default_matrix_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_default_no_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_default_scalar_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_default_single_elem_vector_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_default_vector_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_default_zero_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_transposeA_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gemm_transposeB_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_globalaveragepool_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'GlobalAveragePool' domain='', looking for 'shape_globalaveragepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_globalaveragepool_precomputed_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'GlobalAveragePool' domain='', looking for 'shape_globalaveragepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_globalmaxpool_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GlobalMaxPool' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_globalmaxpool_precomputed_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GlobalMaxPool' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_greater_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 57, in shape_greater
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_greater_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 62, in shape_greaterorequal
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_greater_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 57, in shape_greater
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_gridsample_aligncorners_true_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_bicubic_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_bilinear_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_border_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_nearest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_reflection_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gridsample_zeros_padding_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GridSample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gru_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GRU' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gru_defaults_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GRU' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gru_seq_length_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GRU' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gru_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'GRU' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_identity_opt_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n sequence_type {\n elem_type {\n tensor_type {\n elem_type: 1\n shape {\n dim {\n dim_value: 5\n }\n }\n }\n }\n }\n}\n'.
======================================================================
ERROR: test_identity_sequence_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 320, in run
outputs = list(prepared_model.run(inputs))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
outs = self._session.run(feeds)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 49, in run
shapes[k], v.shape, k))
AttributeError: 'list' object has no attribute 'shape'
======================================================================
ERROR: test_if_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'If' domain='', looking for 'shape_if' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_if_opt_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 605, in to_sequence
outputs[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n sequence_type {\n elem_type {\n tensor_type {\n elem_type: 1\n shape {\n dim {\n dim_value: 5\n }\n }\n }\n }\n }\n}\n'.
======================================================================
ERROR: test_if_seq_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'If' domain='', looking for 'shape_if' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_instancenorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'InstanceNormalization' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_instancenorm_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'InstanceNormalization' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_less_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 67, in shape_less
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_less_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 72, in shape_lessorequal
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_less_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 67, in shape_less
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_logsoftmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_logsoftmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_logsoftmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_logsoftmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_logsoftmax_example_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_logsoftmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_logsoftmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_loop11_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Loop' domain='', looking for 'shape_loop' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_loop13_seq_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Loop' domain='', looking for 'shape_loop' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_loop16_seq_none_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n sequence_type {\n elem_type {\n tensor_type {\n elem_type: 1\n shape {\n }\n }\n }\n }\n}\n'.
======================================================================
ERROR: test_lrn_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LRN' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_lrn_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LRN' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_lstm_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LSTM' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_lstm_defaults_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LSTM' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_lstm_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LSTM' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_lstm_with_peepholes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LSTM' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_matmul_2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MatMul' domain='', looking for 'shape_matmul' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_matmul_3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MatMul' domain='', looking for 'shape_matmul' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_matmul_4d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MatMul' domain='', looking for 'shape_matmul' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_matmulinteger_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'MatMulInteger' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_max_one_input_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 77, in shape_max
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 24, in _element_wise
y = known_shapes[node.input[1]]
IndexError: list index (1) out of range
======================================================================
ERROR: test_maxpool_1d_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_dilations_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_2d_uint8_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_3d_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_with_argmax_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxpool_with_argmax_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_maxunpool_export_with_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'MaxUnpool' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_maxunpool_export_without_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'MaxUnpool' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_mean_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Mean' domain='', looking for 'shape_mean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_mean_one_input_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Mean' domain='', looking for 'shape_mean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_mean_two_inputs_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Mean' domain='', looking for 'shape_mean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_min_one_input_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 82, in shape_min
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 24, in _element_wise
y = known_shapes[node.input[1]]
IndexError: list index (1) out of range
======================================================================
ERROR: test_momentum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Momentum' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_momentum_multiple_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Momentum' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_mul_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 92, in shape_mul
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_mvn_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 77, in shape_dispatch
sess = rt_class(onnx_schema.function_body)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_mvn_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nesterov_momentum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Momentum' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nllloss_NC_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NC_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_weight_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_no_weight_reduction_mean_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_nonmaxsuppression_center_point_box_format_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_flipped_coordinates_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_identical_boxes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_limit_output_size_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_single_box_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_suppress_by_IOU_and_scores_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_suppress_by_IOU_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_two_batches_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonmaxsuppression_two_classes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonMaxSuppression' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_nonzero_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'NonZero' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_onehot_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'OneHot' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_onehot_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'OneHot' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_onehot_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'OneHot' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_onehot_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'OneHot' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_optional_get_element_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n tensor_type {\n elem_type: 1\n shape {\n dim {\n dim_value: 4\n }\n }\n }\n}\n'.
======================================================================
ERROR: test_optional_get_element_sequence_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n sequence_type {\n elem_type {\n tensor_type {\n elem_type: 6\n shape {\n dim {\n dim_value: 4\n }\n }\n }\n }\n }\n}\n'.
======================================================================
ERROR: test_optional_has_element_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n tensor_type {\n elem_type: 1\n shape {\n dim {\n dim_value: 4\n }\n }\n }\n}\n'.
======================================================================
ERROR: test_optional_has_element_empty_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 419, in _var_as_dict
dtype['optional'] = _var_as_dict(optional)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Optional'> value is 'elem_type {\n tensor_type {\n elem_type: 6\n shape {\n }\n }\n}\n'.
======================================================================
ERROR: test_or_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 97, in shape_or
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('bool')) and ShapeResult('y', [5], dtype('bool')).
======================================================================
ERROR: test_or_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 97, in shape_or
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('bool')) and ShapeResult('y', [4, 5], dtype('bool')).
======================================================================
ERROR: test_or_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 97, in shape_or
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5, 6], dtype('bool')) and ShapeResult('y', [5, 6], dtype('bool')).
======================================================================
ERROR: test_or_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 97, in shape_or
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5, 6], dtype('bool')) and ShapeResult('y', [4, 5, 6], dtype('bool')).
======================================================================
ERROR: test_pow_bcast_array_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [2, 3], dtype('float32')) and ShapeResult('y', [3], dtype('float32')).
======================================================================
ERROR: test_pow_bcast_scalar_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [], dtype('float32')).
======================================================================
ERROR: test_pow_types_float32_int32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [3], dtype('int32')) (dtypes).
======================================================================
ERROR: test_pow_types_float32_int64_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [3], dtype('int64')) (dtypes).
======================================================================
ERROR: test_pow_types_float32_uint32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [3], dtype('uint32')) (dtypes).
======================================================================
ERROR: test_pow_types_float32_uint64_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [3], dtype('uint64')) (dtypes).
======================================================================
ERROR: test_pow_types_float_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('int64')) and ShapeResult('y', [3], dtype('float32')) (dtypes).
======================================================================
ERROR: test_pow_types_int32_float32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('int32')) and ShapeResult('y', [3], dtype('float32')) (dtypes).
======================================================================
ERROR: test_pow_types_int64_float32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('int64')) and ShapeResult('y', [3], dtype('float32')) (dtypes).
======================================================================
ERROR: test_pow_types_int_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 102, in shape_pow
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 360, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Cannot broadcast shapes ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [3], dtype('int64')) (dtypes).
======================================================================
ERROR: test_prelu_broadcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'PRelu' domain='', looking for 'shape_prelu' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_prelu_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'PRelu' domain='', looking for 'shape_prelu' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_qlinearconv_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'QLinearConv' domain='', looking for 'shape_qlinearconv' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_qlinearmatmul_2D_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'QLinearMatMul' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_qlinearmatmul_3D_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'QLinearMatMul' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_quantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'QuantizeLinear' domain='', looking for 'shape_quantizelinear' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_quantizelinear_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'QuantizeLinear' domain='', looking for 'shape_quantizelinear' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_range_float_type_positive_delta_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 77, in shape_dispatch
sess = rt_class(onnx_schema.function_body)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_range_float_type_positive_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 317, in preprocess_parameters
sess = rt_class(v['value'], runtime=runtime,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.ValueInfoProto'> value is 'name: "prev"\n'.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 221, in setup_runtime
self.preprocess_parameters(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 323, in preprocess_parameters
raise RuntimeError(
RuntimeError: Unable to instantiate a node of type 'Loop' and name ''.
======================================================================
ERROR: test_range_int32_type_negative_delta_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 77, in shape_dispatch
sess = rt_class(onnx_schema.function_body)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_range_int32_type_negative_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 317, in preprocess_parameters
sess = rt_class(v['value'], runtime=runtime,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 187, in _init
self.graph_ = self.to_sequence(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 595, in to_sequence
variables[obj.name] = _var_as_dict(obj)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 488, in _var_as_dict
raise NotImplementedError( # pragma: no cover
NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.ValueInfoProto'> value is 'name: "prev"\n'.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 221, in setup_runtime
self.preprocess_parameters(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 323, in preprocess_parameters
raise RuntimeError(
RuntimeError: Unable to instantiate a node of type 'Loop' and name ''.
======================================================================
ERROR: test_reduce_l1_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l1_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL1' domain='', looking for 'shape_reducel1' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_l2_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceL2' domain='', looking for 'shape_reducel2' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_asc_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSum' domain='', looking for 'shape_reducelogsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSum' domain='', looking for 'shape_reducelogsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSum' domain='', looking for 'shape_reducelogsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_desc_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSum' domain='', looking for 'shape_reducelogsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSumExp' domain='', looking for 'shape_reducelogsumexp' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_log_sum_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceLogSum' domain='', looking for 'shape_reducelogsum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_default_axes_keepdim_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_max_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_mean_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_min_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_prod_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_empty_axes_input_noop_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_empty_axes_input_noop_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reduce_sum_square_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSumSquare' domain='', looking for 'shape_reducesumsquare' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reflect_pad_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_allowzero_reordered_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 213, in _init
raise RuntimeError( # pragma: no cover
RuntimeError: Wrong ONNX file, one input or output has an empty shape: name: "data"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 0
}
dim {
dim_value: 3
}
dim {
dim_value: 4
}
}
}
}
.
======================================================================
ERROR: test_reshape_extended_dims_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_negative_dim_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_negative_extended_dims_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_one_dim_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_reduced_dims_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_reordered_all_dims_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_reordered_last_dims_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_zero_and_negative_dim_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_reshape_zero_dim_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_resize_downsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_sizes_linear_pytorch_half_pixel_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_downsample_sizes_nearest_tf_half_pixel_for_nn_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_tf_crop_and_resize_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_cubic_asymmetric_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_sizes_nearest_ceil_half_pixel_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_sizes_nearest_floor_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Resize' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_reversesequence_batch_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ReverseSequence' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_reversesequence_time_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ReverseSequence' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_rnn_seq_length_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 118, in load_op
return cl(onnx_node, desc=desc, runtme=runtime, **options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 156, in __init__
CommonRNN.__init__(self, onnx_node, desc=desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 40, in __init__
self.f1 = self.choose_act(self.activations[0],
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 57, in choose_act
raise RuntimeError( # pragma: no cover
RuntimeError: Unknown activation function 'tanh'.
======================================================================
ERROR: test_roialign_aligned_false_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'RoiAlign' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_roialign_aligned_true_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'RoiAlign' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_scan9_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Scan' domain='', looking for 'shape_scan' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_scan_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Scan' domain='', looking for 'shape_scan' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_scatter_elements_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_scatter_elements_with_duplicate_indices_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_scatter_elements_with_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_scatter_elements_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_scatter_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Scatter' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_scatter_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Scatter' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_scatternd_add_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ScatterND' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_scatternd_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ScatterND' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_scatternd_multiply_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ScatterND' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sce_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1_mean_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_no_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_weights_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_weights_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_weights_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_none_weights_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_sum_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_sum_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sce_sum_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sequence_insert_at_back_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SequenceInsert' domain='', looking for 'shape_sequenceinsert' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sequence_insert_at_front_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SequenceInsert' domain='', looking for 'shape_sequenceinsert' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_clip_end_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_clip_start_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_end_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_start_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_start_1_end_2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_start_1_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shape_start_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_shrink_hard_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Shrink' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_shrink_soft_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Shrink' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_simple_rnn_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 118, in load_op
return cl(onnx_node, desc=desc, runtme=runtime, **options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 156, in __init__
CommonRNN.__init__(self, onnx_node, desc=desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 40, in __init__
self.f1 = self.choose_act(self.activations[0],
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 57, in choose_act
raise RuntimeError( # pragma: no cover
RuntimeError: Unknown activation function 'tanh'.
======================================================================
ERROR: test_simple_rnn_defaults_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 118, in load_op
return cl(onnx_node, desc=desc, runtme=runtime, **options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 156, in __init__
CommonRNN.__init__(self, onnx_node, desc=desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 40, in __init__
self.f1 = self.choose_act(self.activations[0],
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 57, in choose_act
raise RuntimeError( # pragma: no cover
RuntimeError: Unknown activation function 'tanh'.
======================================================================
ERROR: test_simple_rnn_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 118, in load_op
return cl(onnx_node, desc=desc, runtme=runtime, **options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 156, in __init__
CommonRNN.__init__(self, onnx_node, desc=desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 40, in __init__
self.f1 = self.choose_act(self.activations[0],
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_rnn.py", line 57, in choose_act
raise RuntimeError( # pragma: no cover
RuntimeError: Unknown activation function 'tanh'.
======================================================================
ERROR: test_size_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Size' domain='', looking for 'shape_size' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_size_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Size' domain='', looking for 'shape_size' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_default_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_default_steps_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_end_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_neg_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_neg_steps_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_slice_start_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 213, in _init
raise RuntimeError( # pragma: no cover
RuntimeError: Wrong ONNX file, one input or output has an empty shape: name: "y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 20
}
dim {
dim_value: 0
}
dim {
dim_value: 5
}
}
}
}
.
======================================================================
ERROR: test_softmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softmax_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_softplus_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Softplus' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_softplus_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Softplus' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_softsign_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Softsign' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_softsign_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Softsign' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_spacetodepth_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SpaceToDepth' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_spacetodepth_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SpaceToDepth' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_split_equal_parts_1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_split_equal_parts_2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_split_equal_parts_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_split_variable_parts_1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_split_variable_parts_2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_split_variable_parts_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_split_zero_size_splits_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_squeeze_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Squeeze' domain='', looking for 'shape_squeeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_squeeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Squeeze' domain='', looking for 'shape_squeeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnormalizer_export_monday_casesensintive_lower_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnormalizer_export_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnormalizer_export_monday_casesensintive_upper_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnormalizer_export_monday_empty_output_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnormalizer_export_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnormalizer_nostopwords_nochangecase_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sub_bcast_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 107, in shape_sub
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('y', [5], dtype('float32')).
======================================================================
ERROR: test_sum_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sum_one_input_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sum_two_inputs_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_batch_onlybigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_batch_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_batch_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_only_bigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_onlybigrams_levelempty_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tfidfvectorizer_tf_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_thresholdedrelu_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ThresholdedRelu' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_thresholdedrelu_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ThresholdedRelu' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_thresholdedrelu_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'ThresholdedRelu' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_tile_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Tile' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_tile_precomputed_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Tile' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_top_k_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TopK' domain='', looking for 'shape_topk' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_top_k_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TopK' domain='', looking for 'shape_topk' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_top_k_smallest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'TopK' domain='', looking for 'shape_topk' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_training_dropout_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_training_dropout_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_training_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_training_dropout_mask_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_training_dropout_zero_ratio_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_training_dropout_zero_ratio_mask_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_all_permutations_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_all_permutations_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_all_permutations_2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_all_permutations_3_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_all_permutations_4_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_all_permutations_5_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_transpose_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_tril_zero_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 213, in _init
raise RuntimeError( # pragma: no cover
RuntimeError: Wrong ONNX file, one input or output has an empty shape: name: "x"
type {
tensor_type {
elem_type: 7
shape {
dim {
dim_value: 3
}
dim {
dim_value: 0
}
dim {
dim_value: 5
}
}
}
}
.
======================================================================
ERROR: test_triu_zero_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 213, in _init
raise RuntimeError( # pragma: no cover
RuntimeError: Wrong ONNX file, one input or output has an empty shape: name: "x"
type {
tensor_type {
elem_type: 7
shape {
dim {
dim_value: 0
}
dim {
dim_value: 5
}
}
}
}
.
======================================================================
ERROR: test_unique_not_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Unique' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_unique_sorted_with_axis_3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Unique' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_unique_sorted_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Unique' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_unique_sorted_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Unique' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_unique_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Unique' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_unsqueeze_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_axis_2_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_axis_3_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_three_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_two_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_unsqueeze_unsorted_axes_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_upsample_nearest_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Upsample' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_where_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Where' domain='', looking for 'shape_where' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_where_long_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Where' domain='', looking for 'shape_where' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_xor_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 112, in shape_xor
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('bool')) and ShapeResult('y', [5], dtype('bool')).
======================================================================
ERROR: test_xor_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 112, in shape_xor
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('bool')) and ShapeResult('y', [4, 5], dtype('bool')).
======================================================================
ERROR: test_xor_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 112, in shape_xor
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5, 6], dtype('bool')) and ShapeResult('y', [5, 6], dtype('bool')).
======================================================================
ERROR: test_xor_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 112, in shape_xor
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5, 6], dtype('bool')) and ShapeResult('y', [4, 5, 6], dtype('bool')).
======================================================================
ERROR: test_AvgPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_AvgPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_AvgPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_AvgPool2d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_AvgPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_AvgPool3d_stride1_pad0_gpu_input_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_AvgPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_BatchNorm1d_3d_input_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_BatchNorm2d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_BatchNorm2d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_BatchNorm3d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_BatchNorm3d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_ConstantPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 238, in infer_shapes
res = self._infer_shapes(*args, **kwargs)
TypeError: _infer_shapes() missing 1 required positional argument: 'pads'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 438, in _set_shape_inference_runtime
res = self.ops_.infer_shapes(*args)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 240, in infer_shapes
raise TypeError( # pragma: no cover
TypeError: Issues with (operator 'Pad') and shapes
ShapeObject((2, 3, 4, 4), dtype=numpy.float32, name='0')
----args
(ShapeObject((2, 3, 4, 4), dtype=numpy.float32, name='0'),)
------kwargs
{}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1410, in _set_shape_inference_runtime
s = node._set_shape_inference_runtime(values)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 440, in _set_shape_inference_runtime
raise TypeError(
TypeError: Unable to call infer_shapes with 1 arguments for class 'Pad' (<bound method OpRun.infer_shapes of <mlprodict.onnxrt.ops_cpu.op_pad.Pad object at 0x7fbe73914be0>>)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 282, in _init
self.shapes_ = self._set_shape_inference_runtime()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1421, in _set_shape_inference_runtime
raise RuntimeError("Unable to infer shape of node {}\n{}".format(
RuntimeError: Unable to infer shape of node 0
0 --> Onnx-Pad(0) -> 1
======================================================================
ERROR: test_Conv1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_pad1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_pad1size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_pad2_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_pad2size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_depthwise_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_depthwise_padded_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_depthwise_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_depthwise_with_multiplier_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_groups_thnn_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv2d_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_dilated_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Conv3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_ConvTranspose2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_ConvTranspose2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Embedding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Embedding_sparse_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_GLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_GLU_dim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_Linear_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_Linear_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_MaxPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool1d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool2d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_MaxPool3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_PReLU_1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_PReLU_1d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_PReLU_2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_PReLU_2d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_PReLU_3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_PReLU_3d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_PixelShuffle_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_PoissonNLLLLoss_no_reduce_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_ReflectionPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 238, in infer_shapes
res = self._infer_shapes(*args, **kwargs)
TypeError: _infer_shapes() missing 1 required positional argument: 'pads'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 438, in _set_shape_inference_runtime
res = self.ops_.infer_shapes(*args)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 240, in infer_shapes
raise TypeError( # pragma: no cover
TypeError: Issues with (operator 'Pad') and shapes
ShapeObject((2, 3, 8, 8), dtype=numpy.float32, name='0')
----args
(ShapeObject((2, 3, 8, 8), dtype=numpy.float32, name='0'),)
------kwargs
{}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1410, in _set_shape_inference_runtime
s = node._set_shape_inference_runtime(values)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 440, in _set_shape_inference_runtime
raise TypeError(
TypeError: Unable to call infer_shapes with 1 arguments for class 'Pad' (<bound method OpRun.infer_shapes of <mlprodict.onnxrt.ops_cpu.op_pad.Pad object at 0x7fbe7390bf70>>)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 282, in _init
self.shapes_ = self._set_shape_inference_runtime()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1421, in _set_shape_inference_runtime
raise RuntimeError("Unable to infer shape of node {}\n{}".format(
RuntimeError: Unable to infer shape of node 0
0 --> Onnx-Pad(0) -> 1
======================================================================
ERROR: test_ReplicationPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 238, in infer_shapes
res = self._infer_shapes(*args, **kwargs)
TypeError: _infer_shapes() missing 1 required positional argument: 'pads'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 438, in _set_shape_inference_runtime
res = self.ops_.infer_shapes(*args)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 240, in infer_shapes
raise TypeError( # pragma: no cover
TypeError: Issues with (operator 'Pad') and shapes
ShapeObject((2, 3, 4, 4), dtype=numpy.float32, name='0')
----args
(ShapeObject((2, 3, 4, 4), dtype=numpy.float32, name='0'),)
------kwargs
{}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1410, in _set_shape_inference_runtime
s = node._set_shape_inference_runtime(values)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 440, in _set_shape_inference_runtime
raise TypeError(
TypeError: Unable to call infer_shapes with 1 arguments for class 'Pad' (<bound method OpRun.infer_shapes of <mlprodict.onnxrt.ops_cpu.op_pad.Pad object at 0x7fbe7390b190>>)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 282, in _init
self.shapes_ = self._set_shape_inference_runtime()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1421, in _set_shape_inference_runtime
raise RuntimeError("Unable to infer shape of node {}\n{}".format(
RuntimeError: Unable to infer shape of node 0
0 --> Onnx-Pad(0) -> 1
======================================================================
ERROR: test_Softplus_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Softplus' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_Softsign_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_ZeroPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 238, in infer_shapes
res = self._infer_shapes(*args, **kwargs)
TypeError: _infer_shapes() missing 1 required positional argument: 'pads'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 438, in _set_shape_inference_runtime
res = self.ops_.infer_shapes(*args)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 240, in infer_shapes
raise TypeError( # pragma: no cover
TypeError: Issues with (operator 'Pad') and shapes
ShapeObject((2, 3, 4, 4), dtype=numpy.float32, name='0')
----args
(ShapeObject((2, 3, 4, 4), dtype=numpy.float32, name='0'),)
------kwargs
{}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1410, in _set_shape_inference_runtime
s = node._set_shape_inference_runtime(values)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 440, in _set_shape_inference_runtime
raise TypeError(
TypeError: Unable to call infer_shapes with 1 arguments for class 'Pad' (<bound method OpRun.infer_shapes of <mlprodict.onnxrt.ops_cpu.op_pad.Pad object at 0x7fbe5c3617f0>>)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 282, in _init
self.shapes_ = self._set_shape_inference_runtime()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1421, in _set_shape_inference_runtime
raise RuntimeError("Unable to infer shape of node {}\n{}".format(
RuntimeError: Unable to infer shape of node 0
0 --> Onnx-Pad(0) -> 1
======================================================================
ERROR: test_operator_add_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 37, in shape_add
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('0', [2, 3], dtype('float64')) and ShapeResult('1', [3], dtype('float64')).
======================================================================
ERROR: test_operator_add_size1_right_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 103, in shape_dispatch
return fct_shape(known_shape, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 37, in shape_add
return _element_wise(known_shapes, node)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 32, in _element_wise
node.output[0], ShapeResult.broadcast(x, y, name=node.output[0]))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 356, in broadcast
raise ShapeInferenceException( # pragma: no cover
mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceException: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('0', [2, 3], dtype('float64')) and ShapeResult('1', [3], dtype('float64')).
======================================================================
ERROR: test_operator_addconstant_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_addmm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_chunk_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_concat2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_conv_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_operator_convtranspose_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_operator_flatten_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_index_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_maxpool_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_mm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_non_float_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_operator_pad_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 238, in infer_shapes
res = self._infer_shapes(*args, **kwargs)
TypeError: _infer_shapes() missing 1 required positional argument: 'pads'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 438, in _set_shape_inference_runtime
res = self.ops_.infer_shapes(*args)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 240, in infer_shapes
raise TypeError( # pragma: no cover
TypeError: Issues with (operator 'Pad') and shapes
ShapeObject((1, 1, 2, 4), dtype=numpy.float32, name='0')
----args
(ShapeObject((1, 1, 2, 4), dtype=numpy.float32, name='0'),)
------kwargs
{}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1410, in _set_shape_inference_runtime
s = node._set_shape_inference_runtime(values)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 440, in _set_shape_inference_runtime
raise TypeError(
TypeError: Unable to call infer_shapes with 1 arguments for class 'Pad' (<bound method OpRun.infer_shapes of <mlprodict.onnxrt.ops_cpu.op_pad.Pad object at 0x7fbe5c47d220>>)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 282, in _init
self.shapes_ = self._set_shape_inference_runtime()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1421, in _set_shape_inference_runtime
raise RuntimeError("Unable to infer shape of node {}\n{}".format(
RuntimeError: Unable to infer shape of node 0
0 --> Onnx-Pad(0) -> 1
======================================================================
ERROR: test_operator_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
raise NotImplementedError(
NotImplementedError: Optional inputs are not implemented yet. (name='1')
======================================================================
ERROR: test_operator_permute2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_reduced_mean_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_reduced_mean_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_reduced_sum_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_reduced_sum_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_repeat_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Tile' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_operator_repeat_dim_overflow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Tile' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_operator_symbolic_override_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'InstanceNormalization' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_operator_symbolic_override_nested_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_operator_view_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_bvlc_alexnet_cpu (__main__.OnnxBackendRealModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'LRN' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_expand_shape_model1_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_expand_shape_model2_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_expand_shape_model3_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_expand_shape_model4_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_gradient_of_add_and_mul_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Gradient' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_gradient_of_add_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Gradient' from domain 'ai.onnx.preview.training' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sequence_model1_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SequenceEmpty' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sequence_model2_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SequenceErase' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sequence_model3_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SequenceErase' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sequence_model4_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SequenceConstruct' domain='', looking for 'shape_sequenceconstruct' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sequence_model5_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'SequenceConstruct' domain='', looking for 'shape_sequenceconstruct' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_sequence_model6_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SplitToSequence' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sequence_model7_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SplitToSequence' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_sequence_model8_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'SplitToSequence' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_shrink_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 303, in create_inference_session
return _CombineModels(OnnxInference(content),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 142, in __init__
self._init(existing_functions)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 260, in _init
node.setup_runtime(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 260, in setup_runtime
raise e
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 248, in setup_runtime
self.ops_ = load_op(self.onnx_node, desc=self.desc,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
return lo(onnx_node, desc=desc, options=options)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 85, in load_op
raise MissingOperatorError( # pragma no cover
mlprodict.onnxrt.excs.MissingOperatorError: Operator 'Shrink' from domain '' has no runtime yet. Available list:
- Abs Acos Acosh Add And ArgMax ArgMin ArrayFeatureExtractor Asin Asinh
Atan Atanh AveragePool BatchNormalization Bernoulli Binarizer BitShift
BroadcastGradientArgs CDist Cast CastLike CategoryMapper Ceil Celu
Clip ComplexAbs Compress Concat ConcatFromSequence Constant
ConstantOfShape Conv ConvTranspose Cos Cosh CumSum DEBUG
DequantizeLinear Det DictVectorizer Div Dropout Einsum Elu Equal Erf
Exp Expand EyeLike FFT FFT2D FeatureVectorizer Flatten Floor
FusedMatMul Gather GatherElements Gemm GlobalAveragePool Greater
GreaterOrEqual HardSigmoid Hardmax Identity If Imputer IsInf IsNaN
LabelEncoder LeakyRelu Less LessOrEqual LinearClassifier
LinearRegressor Log LogSoftmax Loop LpNormalization MatMul Max MaxPool
Mean Min Mod Mul Neg NegativeLogLikelihoodLoss Normalizer Not
OneHotEncoder OpRun Or PRelu Pad Pow QLinearConv QuantizeLinear RFFT
RNN RandomNormal RandomNormalLike RandomUniform RandomUniformLike
Range Reciprocal ReduceL1 ReduceL2 ReduceLogSum ReduceLogSumExp
ReduceMax ReduceMean ReduceMin ReduceProd ReduceSum ReduceSumSquare
Relu Reshape Round SVMClassifier SVMClassifierDouble SVMRegressor
SVMRegressorDouble Scaler Scan ScatterElements Selu SequenceAt
SequenceConstruct SequenceInsert Shape Sigmoid Sign Sin Sinh Size
Slice Softmax SoftmaxCrossEntropyLoss SoftmaxGrad Solve Split Sqrt
Squeeze StringNormalizer Sub Sum Tan Tanh TfIdfVectorizer Tokenizer
TopK Transpose TreeEnsembleClassifier TreeEnsembleClassifierDouble
TreeEnsembleRegressor TreeEnsembleRegressorDouble Trilu Unsqueeze
Where Xor YieldOp ZipMap
======================================================================
ERROR: test_strnorm_model_monday_casesensintive_lower_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnorm_model_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnorm_model_monday_casesensintive_upper_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnorm_model_monday_empty_output_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnorm_model_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
ERROR: test_strnorm_model_nostopwords_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 295, in run
prepared_model = self.backend.prepare(model, device)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
return cls.prepare(binm, device, **kwargs)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
inf = cls.create_inference_session(model)
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
OnnxShapeInference(content))
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
self.known_shapes_ = self._run_empty()
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 165, in _run_empty
cont = cont or shape_dispatch(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
raise RuntimeError( # pragma: no cover
RuntimeError: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
shape_mod shape_mul shape_neg shape_not shape_or shape_pow
shape_reciprocal shape_relu shape_result shape_round shape_selu
shape_sigmoid shape_sign shape_sin shape_sinh shape_softmax shape_sqrt
shape_sub shape_tan shape_tanh shape_trilu shape_xor
======================================================================
FAIL: test_castlike_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 189, in assert_similar_outputs
np.testing.assert_array_equal(outputs[i], ref_outputs[i])
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 934, in assert_array_equal
assert_array_compare(operator.__eq__, x, y, err_msg=err_msg,
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Arrays are not equal
Mismatched elements: 12 / 12 (100%)
x: array([[0.9767611026763916, 0.6048455238342285, 0.7392635941505432,
0.03918779268860817],
[0.28280696272850037, 0.12019655853509903, 0.296140193939209,...
y: array([['0.9767611', '0.6048455', '0.7392636', '0.039187793'],
['0.28280696', '0.12019656', '0.2961402', '0.11872772'],
['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
dtype=object)
======================================================================
FAIL: test_isinf_negative_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 3 / 6 (50%)
x: array([False, False, True, False, False, True])
y: array([False, False, False, False, True, False])
======================================================================
FAIL: test_isinf_positive_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 3 / 6 (50%)
x: array([False, False, False, False, True, False])
y: array([False, False, True, False, False, True])
======================================================================
FAIL: test_logsoftmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 60 / 60 (100%)
Max absolute difference: 1.374
Max relative difference: 0.679
x: array([[[-0.63786 , -2.150428, -0.994332, -0.32463 , -0.607985],
[-1.424634, -1.600497, -1.821713, -2.462304, -2.064944],
[-2.257869, -1.096312, -1.212032, -2.443848, -2.031679],...
y: array([[[-1.488776, -2.852671, -2.27409 , -1.011935, -1.38527 ],
[-1.222501, -1.24969 , -2.048422, -2.09656 , -1.78918 ],
[-2.184687, -0.874457, -1.567693, -2.207056, -1.884868],...
======================================================================
FAIL: test_mod_int64_fmod_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 2 / 6 (33.3%)
Max absolute difference: 3
Max relative difference: 3.
x: array([ 0, -2, 5, 0, 2, 3])
y: array([ 0, 1, 5, 0, -1, 3])
======================================================================
FAIL: test_mod_mixed_sign_float16_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 4 / 6 (66.7%)
Max absolute difference: 3.4
Max relative difference: 20.67
x: array([ 1.998, -3.002, 5. , -1.998, 3.002, 3. ], dtype=float16)
y: array([-0.10156, 0.3984 , 5. , 0.10156, -0.3984 , 3. ],
dtype=float16)
======================================================================
FAIL: test_mod_mixed_sign_float32_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 4 / 6 (66.7%)
Max absolute difference: 3.4
Max relative difference: 21.
x: array([ 2., -3., 5., -2., 3., 3.], dtype=float32)
y: array([-0.1, 0.4, 5. , 0.1, -0.4, 3. ], dtype=float32)
======================================================================
FAIL: test_mod_mixed_sign_float64_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 4 / 6 (66.7%)
Max absolute difference: 3.4
Max relative difference: 21.
x: array([ 2., -3., 5., -2., 3., 3.])
y: array([-0.1, 0.4, 5. , 0.1, -0.4, 3. ])
======================================================================
FAIL: test_selu_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 28 / 60 (46.7%)
Max absolute difference: 3.689
Max relative difference: 0.667
x: array([[[ 5.292157, 1.200472, 2.936214, 6.722679, 5.602674],
[-1.247332, 2.850265, -0.280919, -0.196141, 1.231796],
[ 0.432131, 4.362821, 2.283113, 0.365025, 1.33159 ],...
y: array([[[ 5.292157, 1.200472, 2.936214, 6.722679, 5.602674],
[-3.741995, 2.850265, -0.842756, -0.588423, 1.231796],
[ 0.432131, 4.362821, 2.283113, 0.365025, 1.33159 ],...
======================================================================
FAIL: test_selu_default_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 28 / 60 (46.7%)
Max absolute difference: 0.078
Max relative difference: 0.048
x: array([[[ 1.853492, 0.420446, 1.028361, 2.354509, 1.962245],
[-1.043557, 0.998259, -0.235026, -0.164098, 0.431416],
[ 0.151347, 1.528007, 0.799623, 0.127844, 0.466368],...
y: array([[[ 1.853492, 0.420446, 1.028361, 2.354509, 1.962245],
[-1.096467, 0.998259, -0.246942, -0.172418, 0.431416],
[ 0.151347, 1.528007, 0.799623, 0.127844, 0.466368],...
======================================================================
FAIL: test_selu_example_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 1 / 3 (33.3%)
Max absolute difference: 2.528
Max relative difference: 0.667
x: array([-1.264241, 0. , 3. ], dtype=float32)
y: array([-3.792723, 0. , 3. ], dtype=float32)
======================================================================
FAIL: test_softmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 60 / 60 (100%)
Max absolute difference: 0.359
Max relative difference: 2.949
x: array([[[0.528422, 0.116434, 0.369971, 0.722795, 0.544447],
[0.240596, 0.201796, 0.161748, 0.085238, 0.126825],
[0.104573, 0.334101, 0.297592, 0.086826, 0.131115],...
y: array([[[0.225649, 0.05769 , 0.10289 , 0.363515, 0.250256],
[0.294493, 0.286594, 0.128938, 0.122878, 0.167097],
[0.112513, 0.417088, 0.208526, 0.110024, 0.151849],...
======================================================================
FAIL: test_SELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 12 / 30 (40%)
Max absolute difference: 0.066
Max relative difference: 0.048
x: array([[[ 0.692616, 0.1449 , -0.808552, 0.983208, 0.129128],
[-0.348553, 0.186445, -0.195859, 0.770409, -0.873378]],
...
y: array([[[ 0.692616, 0.1449 , -0.849546, 0.983208, 0.129128],
[-0.366225, 0.186445, -0.205789, 0.770409, -0.917659]],
...
======================================================================
FAIL: test_operator_selu_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 265, in device_test_func
return test_func(*args, device=device, **kwargs)
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 321, in run
self.assert_similar_outputs(ref_outputs, outputs,
File "/usr/local/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 191, in assert_similar_outputs
np.testing.assert_allclose(
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1530, in assert_allclose
assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
File "/var/lib/jenkins/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
raise AssertionError(msg)
AssertionError:
Not equal to tolerance rtol=0.001, atol=1e-07
Mismatched elements: 14 / 24 (58.3%)
Max absolute difference: 0.078
Max relative difference: 0.048
x: array([[[[-0.176871, -0.654911, 0.171342, -0.980393],
[ 0.056633, 0.702261, -0.096904, -0.62485 ],
[ 0.669205, -0.853963, -1.106803, -0.712424]],...
y: array([[[[-0.185838, -0.688116, 0.171342, -1.0301 ],
[ 0.056633, 0.702261, -0.101817, -0.65653 ],
[ 0.669205, -0.897259, -1.162919, -0.748545]],...
----------------------------------------------------------------------
Ran 2026 tests in 10.649s
FAILED (failures=14, errors=781, skipped=1021)