Coverage for mlprodict/onnxrt/ops_cpu/op_random.py: 86%
# -*- encoding: utf-8 -*-
# pylint: disable=E0203,E1101,C0111
"""
@file
@brief Runtime operator.
"""
import numpy
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
from ._op import OpRun
from ..shape_object import ShapeObject
class _CommonRandom(OpRun):
    """
    Common methods to all random operators.
    """

    def __init__(self, *args, **kwargs):
        OpRun.__init__(self, *args, **kwargs)

    def _dtype(self, *data, dtype_first=False):
        if dtype_first:
            if self.dtype != 0:
                return self.numpy_type
            if len(data) > 0:
                return data[0].dtype
            raise RuntimeError(  # pragma: no cover
                "dtype cannot be None for operator %s, "
                "self.numpy_type=%r, len(data)=%r."
                "" % (self.__class__.__name__,
                      self.numpy_type, len(data)))
        res = None
        if len(data) == 0:
            res = self.numpy_type
        elif self.numpy_type is not None:
            res = self.numpy_type
        elif hasattr(data[0], 'dtype'):
            res = data[0].dtype
        if res is None:
            raise RuntimeError(  # pragma: no cover
                "dtype cannot be None for operator %s, "
                "self.numpy_type=%r, type(data[0])=%r."
                "" % (self.__class__.__name__,
                      self.numpy_type, type(data[0])))
        return res

    def _infer_shapes(self, *data):  # pylint: disable=W0221
        return (ShapeObject(None, self._dtype(*data)), )

    def _infer_types(self, *data):  # pylint: disable=W0221
        return (self._dtype(*data), )

    def _infer_sizes(self, *args, **kwargs):
        res = self.run(*args, **kwargs)
        return (dict(temp=0), ) + res

    def _get_state(self, seed):
        if numpy.isnan(self.seed):
            state = numpy.random.RandomState()
        else:
            state = numpy.random.RandomState(seed=self.seed)
        return state
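# Every operator below draws its generator from ``_get_state``: the default
# ``seed`` attribute is ``numpy.nan``, which leaves the ``RandomState``
# unseeded (results differ between runs); any finite seed value makes the
# draws reproducible.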
class Bernoulli(_CommonRandom):

    atts = {'dtype': 0,
            'seed': numpy.nan}

    def __init__(self, onnx_node, desc=None, **options):
        _CommonRandom.__init__(self, onnx_node, desc=desc,
                               expected_attributes=Bernoulli.atts,
                               **options)
        self.numpy_type = (
            TENSOR_TYPE_TO_NP_TYPE[self.dtype] if self.dtype > 0
            else None)

    def _run(self, x):  # pylint: disable=W0221
        dtype = self._dtype(x, dtype_first=True)
        state = self._get_state(self.seed)
        res = state.binomial(1, p=x).astype(dtype)
        return (res.astype(dtype), )

    def to_python(self, inputs):
        lines = [
            'numpy_dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]',
            'state = numpy.random.RandomState(seed=seed)',
            'return state.binomial(1, %s).astype(numpy_dtype)' % (
                inputs[0], )]
        return ("import numpy\nfrom numpy import nan\n"
                "from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE",
                "\n".join(lines))
class RandomUniform(_CommonRandom):

    atts = {'dtype': 1,
            'low': 0.,
            'high': 1.,
            'seed': numpy.nan,
            'shape': []}

    def __init__(self, onnx_node, desc=None, **options):
        _CommonRandom.__init__(self, onnx_node, desc=desc,
                               expected_attributes=RandomUniform.atts,
                               **options)
        if len(self.shape) == 0:
            raise ValueError(  # pragma: no cover
                "shape cannot be empty for operator %s."
                "" % self.__class__.__name__)
        self.numpy_type = TENSOR_TYPE_TO_NP_TYPE[self.dtype]

    def _run(self, *args):  # pylint: disable=W0221
        if len(args) != 0:
            raise RuntimeError(  # pragma: no cover
                "Operator %s cannot have inputs." % self.__class__.__name__)
        dtype = self._dtype(*args)
        state = self._get_state(self.seed)
        res = state.rand(*self.shape).astype(dtype)
        res *= (self.high - self.low)
        res += self.low
        return (res.astype(dtype), )

    def to_python(self, inputs):
        lines = [
            'numpy_dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]',
            'state = numpy.random.RandomState(seed=seed)',
            'return (state.rand(*%r).astype(numpy.%s) * (%f - %f)) + %f' % (
                list(self.shape), self.numpy_type, self.high, self.low,
                self.low)]
        return ("import numpy\nfrom onnx.mapping import TENSOR_TYPE_TO_NP_TYPE",
                "\n".join(lines))
class RandomUniformLike(_CommonRandom):

    atts = {'low': 0.,
            'high': 1.,
            'seed': numpy.nan,
            'dtype': 0}

    def __init__(self, onnx_node, desc=None, **options):
        _CommonRandom.__init__(self, onnx_node, desc=desc,
                               expected_attributes=RandomUniformLike.atts,
                               **options)
        self.numpy_type = (
            None if self.dtype == 0 else TENSOR_TYPE_TO_NP_TYPE[self.dtype])

    def _run(self, x):  # pylint: disable=W0221
        dtype = self._dtype(x)
        state = self._get_state(self.seed)
        res = state.rand(*x.shape).astype(dtype)
        res *= (self.high - self.low)
        res += self.low
        return (res.astype(dtype), )

    def to_python(self, inputs):
        if len(inputs) > 0 and hasattr(inputs[0], 'dtype'):
            dtype = inputs[0].dtype
            shape = inputs[0].shape
        else:
            dtype = self.numpy_type or numpy.float32
            shape = (1, )
        lines = [
            'numpy_dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]',
            'state = numpy.random.RandomState(seed=seed)',
            'return (state.rand(*%r).astype(numpy.%s) * (%f - %f)) + %f' % (
                shape, dtype, self.high, self.low, self.low)]
        return ("import numpy\nfrom onnx.mapping import TENSOR_TYPE_TO_NP_TYPE",
                "\n".join(lines))
class RandomNormal(_CommonRandom):

    atts = {'dtype': 1,
            'mean': 0.,
            'scale': 1.,
            'seed': numpy.nan,
            'shape': []}

    def __init__(self, onnx_node, desc=None, **options):
        _CommonRandom.__init__(self, onnx_node, desc=desc,
                               expected_attributes=RandomNormal.atts,
                               **options)
        if len(self.shape) == 0:
            raise ValueError(  # pragma: no cover
                "shape cannot be empty for operator %s."
                "" % self.__class__.__name__)
        self.numpy_type = TENSOR_TYPE_TO_NP_TYPE[self.dtype]

    def _run(self, *args):  # pylint: disable=W0221
        if len(args) != 0:
            raise RuntimeError(  # pragma: no cover
                "Operator %s cannot have inputs." % self.__class__.__name__)
        state = self._get_state(self.seed)
        res = state.randn(*self.shape).astype(self.numpy_type)
        res *= self.scale
        res += self.mean
        return (res.astype(self.numpy_type), )

    def to_python(self, inputs):
        lines = [
            'numpy_dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]',
            'state = numpy.random.RandomState(seed=seed)',
            'return (state.randn(*%r).astype(numpy.%s) * %f) + %f' % (
                list(self.shape), self.numpy_type, self.scale, self.mean)]
        return ("import numpy\nfrom onnx.mapping import TENSOR_TYPE_TO_NP_TYPE",
                "\n".join(lines))
class RandomNormalLike(_CommonRandom):

    atts = {'dtype': 0,
            'mean': 0.,
            'scale': 1.,
            'seed': numpy.nan}

    def __init__(self, onnx_node, desc=None, **options):
        _CommonRandom.__init__(self, onnx_node, desc=desc,
                               expected_attributes=RandomNormalLike.atts,
                               **options)
        self.numpy_type = (
            None if self.dtype == 0 else TENSOR_TYPE_TO_NP_TYPE[self.dtype])

    def _run(self, x):  # pylint: disable=W0221
        dtype = self._dtype(x)
        state = self._get_state(self.seed)
        res = state.randn(*x.shape).astype(dtype)
        res *= self.scale
        res += self.mean
        return (res.astype(dtype), )

    def to_python(self, inputs):
        if len(inputs) > 0 and hasattr(inputs[0], 'dtype'):
            dtype = inputs[0].dtype
            shape = inputs[0].shape
        else:
            dtype = self.numpy_type or numpy.float32
            shape = (1, )
        lines = [
            'numpy_dtype = TENSOR_TYPE_TO_NP_TYPE[dtype]',
            'state = numpy.random.RandomState(seed=seed)',
            'return (state.randn(*%r).astype(numpy.%s) * %f) + %f' % (
                shape, dtype, self.scale, self.mean)]
        return ("import numpy\nfrom onnx.mapping import TENSOR_TYPE_TO_NP_TYPE",
                "\n".join(lines))