Coverage for mlprodict/onnxrt/ops_cpu/op_linear_classifier.py: 94%
# -*- encoding: utf-8 -*-
# pylint: disable=E0203,E1101,C0111
"""
@file
@brief Runtime operator.
"""
import numpy
from scipy.special import expit  # pylint: disable=E0611
from ._op import OpRunClassifierProb
from ._op_classifier_string import _ClassifierCommon
from ._op_numpy_helper import numpy_dot_inplace


class LinearClassifier(OpRunClassifierProb, _ClassifierCommon):
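    """
    Python runtime for the ONNX ``ai.onnx.ml.LinearClassifier``
    operator.
    """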

    atts = {'classlabels_ints': [], 'classlabels_strings': [],
            'coefficients': None, 'intercepts': None,
            'multi_class': 0, 'post_transform': b'NONE'}

    def __init__(self, onnx_node, desc=None, **options):
        OpRunClassifierProb.__init__(self, onnx_node, desc=desc,
                                     expected_attributes=LinearClassifier.atts,
                                     **options)
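        # Helper inherited from _ClassifierCommon; judging by its name,
        # it normalizes the classlabels_* attributes before they are
        # used below.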
        self._post_process_label_attributes()
        if not isinstance(self.coefficients, numpy.ndarray):
            raise TypeError(  # pragma: no cover
                "coefficients must be an array, not {}.".format(
                    type(self.coefficients)))
        if len(getattr(self, "classlabels_ints", [])) == 0 and \
                len(getattr(self, 'classlabels_strings', [])) == 0:
            raise ValueError(  # pragma: no cover
                "Fields classlabels_ints or classlabels_strings must be specified.")
        self.nb_class = max(len(getattr(self, 'classlabels_ints', [])),
                            len(getattr(self, 'classlabels_strings', [])))
        if len(self.coefficients.shape) != 1:
            raise ValueError(  # pragma: no cover
                "coefficients must be a 1D array but has shape {}\n{}.".format(
                    self.coefficients.shape, desc))
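        # The flat coefficients attribute stores nb_class contiguous
        # rows; reshape to (nb_class, n_features) and transpose so that
        # _run can compute scores as x @ coefficients.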
        n = self.coefficients.shape[0] // self.nb_class
        self.coefficients = self.coefficients.reshape(self.nb_class, n).T

    def _run(self, x):  # pylint: disable=W0221
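        # numpy_dot_inplace (from _op_numpy_helper) computes the product
        # x @ coefficients; judging by its name and the self.inplaces
        # argument, it reuses buffers in place when allowed.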
        scores = numpy_dot_inplace(self.inplaces, x, self.coefficients)
        if self.intercepts is not None:
            scores += self.intercepts
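
        # Map the raw scores onto the requested scale in place:
        # identity (NONE), sigmoid (LOGISTIC) or row-wise softmax.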
        if self.post_transform == b'NONE':
            pass
        elif self.post_transform == b'LOGISTIC':
            expit(scores, out=scores)
        elif self.post_transform == b'SOFTMAX':
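            # Numerically stable softmax: shift each row by its maximum
            # before exponentiating so numpy.exp cannot overflow.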
            numpy.subtract(scores, scores.max(axis=1)[:, numpy.newaxis],
                           out=scores)
            numpy.exp(scores, out=scores)
            numpy.divide(scores, scores.sum(axis=1)[:, numpy.newaxis],
                         out=scores)
        else:
            raise NotImplementedError("Unknown post_transform: '{}'.".format(
                self.post_transform))

        if self.nb_class == 1:
            # scores has shape (N, 1) here, so flatten the boolean mask
            # before indexing the 1-D label array.
            label = numpy.zeros((scores.shape[0],), dtype=x.dtype)
            label[scores.ravel() > 0] = 1
        else:
            label = numpy.argmax(scores, axis=1)
        return self._post_process_predicted_label(label, scores)
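

# A minimal, self-contained sketch (hypothetical shapes and values, not
# part of the module above) replaying what __init__ and _run compute for
# a 3-class model with the SOFTMAX post-transform.
if __name__ == '__main__':
    rng = numpy.random.default_rng(0)
    nb_class, n_features = 3, 4
    x = rng.normal(size=(5, n_features)).astype(numpy.float32)
    flat_coef = rng.normal(
        size=(nb_class * n_features,)).astype(numpy.float32)
    intercepts = rng.normal(size=(nb_class,)).astype(numpy.float32)

    # Same layout as __init__: flat attribute -> (nb_class, n) -> transpose.
    coef = flat_coef.reshape(nb_class, n_features).T
    scores = x @ coef + intercepts

    # SOFTMAX branch of _run: numerically stable row-wise softmax.
    scores -= scores.max(axis=1)[:, numpy.newaxis]
    scores = numpy.exp(scores)
    scores /= scores.sum(axis=1)[:, numpy.newaxis]

    label = numpy.argmax(scores, axis=1)
    print(label, scores.sum(axis=1))  # each probability row sums to 1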