Coverage for mlprodict/onnxrt/ops_cpu/op_log_softmax.py: 100%

20 statements  

# -*- encoding: utf-8 -*-
# pylint: disable=E0203,E1101,C0111
"""
@file
@brief Runtime operator.
"""
import numpy
from .op_softmax import Softmax


class LogSoftmax(Softmax):
    # Computes log(softmax(X)) by reusing the Softmax runtime
    # and applying the logarithm in place on its output.

    atts = {'axis': 1}

    def __init__(self, onnx_node, desc=None, **options):
        Softmax.__init__(self, onnx_node, desc=desc,
                         **options)

    def _run(self, X):  # pylint: disable=W0221
        if self.inplaces.get(0, False):
            return self._run_inplace(X)
        Y = Softmax._run(self, X)[0]
        numpy.log(Y, out=Y)
        return (Y, )

    def _run_inplace(self, X):
        # Same computation but lets Softmax overwrite X to avoid a copy.
        Y = Softmax._run_inplace(self, X)[0]
        numpy.log(Y, out=Y)
        return (Y, )

    def to_python(self, inputs):
        # Returns the import statement and the numpy code that reproduce
        # the operator for the given input names.
        lines = [
            "Y = {0} - {0}.max(axis=axis)[:, numpy.newaxis]".format(inputs[0]),
            "numpy.exp(Y, out=Y)",
            "Y /= Y.sum(axis=axis)[:, numpy.newaxis]",
            'numpy.log(Y, out=Y)',
            "return Y"]
        return ("import numpy", "\n".join(lines))