Commit 823c327

markrogersjr authored and MarisaKirisame committed

Unified error handling in NNVM and Relay frontends (apache#2828)

1 parent e021565

15 files changed

Lines changed: 471 additions & 371 deletions

nnvm/python/nnvm/frontend/caffe2.py

Lines changed: 6 additions & 7 deletions
@@ -3,7 +3,7 @@
 from __future__ import absolute_import as _abs
 import tvm
 from nnvm import symbol as _sym
-from nnvm.frontend.common import get_nnvm_op, Renamer, AttrConverter as AttrCvt
+from .common import get_nnvm_op
 from .onnx_caffe2_utils import dimension_picker, dimension_constraint, infer_channels, revert_caffe2_pad
 from . import onnx
 

@@ -73,8 +73,8 @@ def get_converter(cls):
 
         if hasattr(cls, '_impl'):
             return getattr(cls, '_impl')
-        raise NotImplementedError('{} not implemented'.format(
-            cls.__name__))
+        raise tvm.error.OpNotImplemented(
+            'Operator {} is not implemented in frontend Caffe2.'.format(cls.__name__))
 
 
 _caffe2_internal_args = {

@@ -176,8 +176,7 @@ def _get_axis_from_order_str(order):
                 return 1
             if order == 'NHWC':
                 return 3
-            raise RuntimeError(
-                "Unsupported storage order: {} in caffe2".format(order))
+            raise tvm.error.OpAttributeInvalid('Value {} in attribute {} of operator {} is not valid.'.format(order, 'order', 'Concat'))
 
         return AttrCvt(
             op_name='concatenate',

@@ -427,8 +426,8 @@ def _convert_operator(self,
             # Add a sanitizing step to convert all byte strings in args to strings
             sym = convert_map[op_type](inputs, args, self._params)
         else:
-            raise NotImplementedError(
-                "Operator {} not implemented.".format(op_type))
+            raise tvm.error.OpNotImplemented(
+                'Operator {} is not supported in frontend Caffe2.'.format(op_type))
         return sym
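
For context, the convention adopted above: converters raise tvm.error.OpNotImplemented with a uniform message naming both the operator and the frontend, instead of a bare NotImplementedError or RuntimeError. A minimal sketch of the pattern, not part of the commit (the _convert_map table and the 'SpatialBN' op name are hypothetical):

    import tvm

    _convert_map = {}   # hypothetical operator -> converter table

    def get_converter(op_type):
        # Same shape as _convert_operator above: look the converter up and
        # raise the unified error class when the operator is missing.
        if op_type in _convert_map:
            return _convert_map[op_type]
        raise tvm.error.OpNotImplemented(
            'Operator {} is not supported in frontend Caffe2.'.format(op_type))

    try:
        get_converter('SpatialBN')
    except tvm.error.OpNotImplemented as err:
        print(err)   # Operator SpatialBN is not supported in frontend Caffe2.

Callers can now branch on the error class rather than parsing exception messages.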

nnvm/python/nnvm/frontend/common.py

Lines changed: 17 additions & 1 deletion
@@ -7,9 +7,25 @@
 def get_nnvm_op(op_name):
     op = getattr(_sym, op_name)
     if not op:
-        raise RuntimeError("Unable to map op_name {} to nnvm.sym".format(op_name))
+        raise OpNotImplemented(
+            'Operator {} is not supported.'.format(op))
     return op
 
+def required_attr(attr, key, op_name):
+    assert isinstance(attr, dict)
+    if key not in attr:
+        raise OpAttributeRequired(
+            'Required attribute {} not found in operator {}'.format(key, op_name))
+    return attr[key]
+
+def parse_tshape(tshape):
+    """Parse tshape in string."""
+    return [int(x.strip()) for x in tshape.strip('()').split(',')]
+
+def parse_bool_str(attr, key, default='False'):
+    """Parse bool string to boolean."""
+    return attr.get(key, default).strip().lower() in ['true', '1', 't', 'y', 'yes']
+
 class Renamer(object):
     """A simply renamer for operators.
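
The three new helpers give the NNVM frontends one shared path for attribute handling. A usage sketch (the attribute dict and the 'conv2d' op name are made up for illustration; the helpers themselves are exactly the ones added above):

    from nnvm.frontend.common import required_attr, parse_tshape, parse_bool_str

    attr = {'kernel': '(3, 3)', 'use_bias': 'True'}   # hypothetical op attributes

    kernel = parse_tshape(required_attr(attr, 'kernel', 'conv2d'))   # -> [3, 3]
    use_bias = parse_bool_str(attr, 'use_bias')                      # -> True

    # A missing mandatory attribute now raises OpAttributeRequired with a
    # uniform message instead of surfacing as a bare KeyError:
    strides = required_attr(attr, 'strides', 'conv2d')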

nnvm/python/nnvm/frontend/coreml.py

Lines changed: 21 additions & 13 deletions
@@ -2,11 +2,10 @@
 """CoreML frontend."""
 from __future__ import absolute_import as _abs
 import numpy as np
-
 import tvm
+from .common import SymbolTable
 from .. import symbol as _sym
 from .._base import string_types
-from .common import SymbolTable
 
 __all__ = ['from_coreml']

@@ -83,7 +82,8 @@ def BatchnormLayerParams(op, insym, symtab):
     """Get layer of batchnorm parameter"""
     # this changes the symbol
     if op.instanceNormalization:
-        raise NotImplementedError("instance normalization not implemented")
+        msg = 'Operator "instance normalization" is not supported in frontend CoreML.'
+        raise tvm.error.OpNotImplemented(msg)
     else:
         params = {'gamma':symtab.new_const(list(op.gamma.floatValue)),
                   'beta':symtab.new_const(list(op.beta.floatValue)),

@@ -136,7 +136,8 @@ def ActivationParams(op, insym, symtab):
         betasym = symtab.new_const(beta)
         return _sym.broadcast_mul(_sym.log(_sym.broadcast_add(
             _sym.exp(insym), betasym)), alphasym)
-    raise NotImplementedError('%s not implemented' % whichActivation)
+    raise tvm.error.OpNotImplemented(
+        'Operator {} is not supported in frontend CoreML.'.format(whichActivation))
 
 def ScaleLayerParams(op, insym, symtab):
     """Scale layer params."""

@@ -158,7 +159,8 @@ def PoolingLayerParams(op, insym, symtab):
             return _sym.global_max_pool2d(insym)
         if op.type == 1:
             return _sym.global_avg_pool2d(insym)
-        raise NotImplementedError("Only max and average pooling implemented")
+        raise tvm.error.OpNotImplemented(
+            'Operator pooling (not max or average) is not supported in frontend CoreML.')
 
     else:
         params = {'pool_size':list(op.kernelSize),

@@ -178,7 +180,8 @@ def PoolingLayerParams(op, insym, symtab):
             params['padding'] = padding
             params['ceil_mode'] = True
         else:
-            raise NotImplementedError("Other convolution padding not implemented")
+            msg = 'Value {} in attribute PoolingPaddingType of operator Pooling is not valid.'
+            raise tvm.error.OpAttributeInvalid(msg.format(op.WhichOneof('PoolingPaddingType')))
 
         # consume padding layer
         if symtab.in_padding:

@@ -190,7 +193,8 @@ def PoolingLayerParams(op, insym, symtab):
             return _sym.max_pool2d(insym, **params)
         if op.type == 1:
             return _sym.avg_pool2d(insym, **params)
-        raise NotImplementedError("Only max and average pooling implemented")
+        msg = 'Operator pooling (not max or average) is not supported in frontend CoreML.'
+        raise tvm.error.OpNotImplemented(msg)
 
 def SoftmaxLayerParams(op, insym, symtab):
     return _sym.softmax(_sym.flatten(insym))

@@ -229,7 +233,8 @@ def ConcatLayerParams(op, insyms, symtab):
     if not isinstance(insyms, list):
         insyms = [insyms]
     if op.sequenceConcat:
-        raise NotImplementedError("Sequence Concat not supported")
+        raise tvm.error.OpNotImplemented(
+            'Operator Sequence Concat is not supported in frontend CoreML.')
     ret = _sym.concatenate(*insyms, axis=1)
     return ret

@@ -243,14 +248,16 @@ def PaddingLayerParams(op, insym, symtab):
     if op.WhichOneof('PaddingType') == 'constant':
         constant = op.constant
         if constant.value != 0:
-            raise NotImplementedError("Padding value {} not supported.".format(constant.value))
+            msg = 'Value {} in attribute "padding value" of operator Padding is not valid.'
+            raise tvm.error.OpAttributeInvalid(msg.format(constant.value))
         padding = [b.startEdgeSize for b in op.paddingAmounts.borderAmounts]
         padding2 = [b.endEdgeSize for b in op.paddingAmounts.borderAmounts]
         for i, j in zip(padding, padding2):
             assert i == j
         symtab.set_padding(padding)
     else:
-        raise NotImplementedError("Only constant padding is supported now.")
+        raise tvm.error.OpNotImplemented(
+            'Operator "non-constant padding" is not supported in frontend CoreML.')
     return insym
 
 def PermuteLayerParams(op, insym, symtab):

@@ -259,8 +266,8 @@ def PermuteLayerParams(op, insym, symtab):
 
 def UpsampleLayerParams(op, insym, symtab):
     if op.scalingFactor[0] != op.scalingFactor[1]:
-        raise NotImplementedError("Upsampling only supported with same \
-                                  height and width scaling factor.")
+        raise tvm.error.OpAttributeInvalid(
+            'Height and width scaling factors of Upsample operator must be equal.')
     interpolationMode = 'NEAREST_NEIGHBOR' if op.mode == 0 else 'BILINEAR'
     return _sym.upsampling(insym, scale=op.scalingFactor[0], method=interpolationMode)

@@ -341,7 +348,8 @@ def coreml_op_to_nnvm(op, inname, outname, symtab):
     """
     classname = type(op).__name__
     if classname not in _convert_map:
-        raise NotImplementedError("%s is not supported" % (classname))
+        raise tvm.error.OpNotImplemented(
+            'Operator {} is not supported in frontend CoreML.'.format(classname))
     if isinstance(inname, string_types):
         insym = symtab.get_var(inname)
     else:
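
Taken together, the CoreML changes let a conversion driver distinguish "operator missing" from "operator present but attribute unsupported". A sketch assuming the usual from_coreml entry point (mlmodel stands in for an already-loaded coremltools model):

    import tvm
    from nnvm.frontend import from_coreml

    def convert(mlmodel):
        try:
            return from_coreml(mlmodel)   # -> (sym, params)
        except tvm.error.OpNotImplemented as err:
            # whole operator unsupported, e.g. Sequence Concat
            print('unsupported operator:', err)
        except tvm.error.OpAttributeInvalid as err:
            # operator known but an attribute value is rejected,
            # e.g. unequal Upsample scaling factors
            print('invalid attribute:', err)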
