66import numpy as np
77import tvm
88from .. import symbol as _sym
9+ from .common import get_nnvm_op , required_attr , parse_tshape , parse_bool_str
910
1011class LAYERTYPE (object ):
1112 """Darknet LAYERTYPE Class constant."""
@@ -61,7 +62,8 @@ def _darknet_maxpooling(inputs, attrs):
6162 """Process the max pool 2d operation."""
6263 kernel = parse_tshape (required_attr (attrs , 'kernel' , 'maxpool' ))
6364 if len (kernel ) != 1 :
64- raise_attribute_unimplemented ('non-2d kernel' , 'pool_2d' )
65+ raise tvm .error .OpAttributeUnimplemented (
66+ 'Non-2D kernels for Max Pooling are not supported in frontend Darknet.' )
6567
6668 op_name , new_attrs = 'max_pool2d' , {}
6769 strides = int (attrs .get ('stride' , (1 , 1 )))
@@ -79,7 +81,8 @@ def _darknet_avgpooling(inputs, attrs):
7981 """Process the average pool 2d operation."""
8082 kernel = parse_tshape (required_attr (attrs , 'kernel' , 'avgpool' ))
8183 if len (kernel ) != 1 :
82- raise_attribute_unimplemented ('non-2d kernel' , 'pool_2d' )
84+ raise tvm .error .OpAttributeUnimplemented (
85+ 'Non-2D kernels for Average Pooling are not supported in frontend Darknet.' )
8386
8487 op_name , new_attrs = 'avg_pool2d' , {}
8588 strides = int (attrs .get ('stride' , (1 , 1 )))
@@ -103,10 +106,12 @@ def _darknet_conv2d(inputs, attrs):
103106 """Process the convolution 2d operation."""
104107 kernel = parse_tshape (required_attr (attrs , 'kernel' , 'conv2d' ))
105108 if len (kernel ) != 1 :
106- raise_attribute_unimplemented ('non 2d kernel' , 'conv2d' )
109+ raise tvm .error .OpAttributeUnimplemented ('Non-2D kernels for Conv2D are unsupported '
110+ 'in frontend Darknet.' )
107111 layout = attrs .get ('layout' , 'NCHW' )
108112 if layout not in ['NCHW' , 'NHWC' ]:
109- raise_attribute_invalid (layout , 'layout' , 'conv2d' )
113+ raise tvm .error .OpAttributeInvalid (
114+ 'Value {} in attribute "layout" of operator Conv2D is not valid.' .format (layout ))
110115 strides = int (attrs .get ('stride' , (1 , 1 )))
111116 pads = int (attrs .get ('pad' , (0 , 0 )))
112117
@@ -142,13 +147,16 @@ def _darknet_conv2d(inputs, attrs):
142147def _darknet_conv2d_transpose (inputs , attrs ):
143148 """Process the convolution 2d transpose operation."""
144149 if 'target_shape' in attrs :
145- raise_attribute_unimplemented ('target_shape' , 'conv2d_transpose' )
150+ raise tvm .error .OpAttributeUnimplemented (
151+ 'Attribute "target_shape" is not supported in operator Conv2D-transpose.' )
146152 kernel = parse_tshape (required_attr (attrs , 'kernel' , 'conv2d_transpose' ))
147153 if len (kernel ) != 2 :
148- raise_attribute_unimplemented ('non-2d kernel' , 'conv2d_transpose' )
154+ raise tvm .error .OpAttributeUnimplemented (
155+ 'Non-2D kernels are not supported in operator Conv2D-transpose.' )
149156 layout = attrs .get ('layout' , 'NCHW' )
150157 if layout not in ['NCHW' , 'NHWC' ]:
151- raise_attribute_invalid (layout , 'layout' , 'conv2d_transpose' )
158+ msg = 'Value {} in attribute "layout" of operator Conv2D-transpose is not valid.'
159+ raise tvm .error .OpAttributeInvalid (msg .format (layout ))
152160 op_name , new_attrs = 'conv2d_transpose' , {}
153161 new_attrs ['channels' ] = required_attr (attrs , 'num_filter' , 'conv2d_transpose' )
154162 new_attrs ['kernel_size' ] = kernel
@@ -222,7 +230,8 @@ def _darknet_dropout(inputs, attrs):
def _darknet_reshape(inputs, attrs):
    """Convert a darknet reshape layer into the NNVM reshape operator.

    Parameters
    ----------
    inputs : list of nnvm.Symbol
        Input symbols to be reshaped.
    attrs : dict
        Layer attributes. Must contain 'shape'; the 'reverse'
        attribute is not supported.

    Returns
    -------
    tuple of (nnvm.Symbol, None)
        The reshape symbol and a None output-name placeholder.

    Raises
    ------
    tvm.error.OpAttributeUnimplemented
        If the 'reverse' attribute is set.
    """
    # Darknet's reverse-reshape semantics have no NNVM equivalent.
    if parse_bool_str(attrs, 'reverse'):
        raise tvm.error.OpAttributeUnimplemented(
            'Attribute "reverse" is not supported in operator Reshape.')
    new_attrs = {'shape': required_attr(attrs, 'shape', 'reshape')}
    return get_nnvm_op('reshape')(*inputs, **new_attrs), None
@@ -324,7 +333,8 @@ def _darknet_activations(inputs, attrs):
324333 elif ACTIVATION .ELU == act :
325334 act_type = 'elu'
326335 else :
327- raise_operator_unimplemented ('act: ' + act )
336+ raise tvm .error .OpNotImplemented (
337+ 'Operator act: {} is not supported in framework Darknet.' .format (act ))
328338
329339 if act_type in ['relu' , 'tanh' ]:
330340 op_name , new_attrs = act_type , {}
@@ -339,7 +349,8 @@ def _darknet_activations(inputs, attrs):
339349 op_name , new_attrs = act_type , {}
340350 sym = get_nnvm_op (op_name )(* inputs , ** new_attrs )
341351 else :
342- raise_operator_unimplemented ('act_type: ' + act_type )
352+ raise tvm .error .OpNotImplemented (
353+ 'Operator act: {} is not supported in framework Darknet.' .format (act ))
343354 return sym , None
344355
345356def _darknet_op_not_support (inputs , attrs ):
@@ -402,7 +413,8 @@ def _darknet_convert_symbol(op_name, inputs, attrs):
402413 if op_name in _DARKNET_CONVERT_MAP :
403414 sym , out_name = _DARKNET_CONVERT_MAP [op_name ](inputs , attrs )
404415 else :
405- raise_operator_unimplemented (op_name )
416+ raise tvm .error .OpNotImplemented (
417+ 'Operator {} is not supported in frontend Darknet.' .format (op_name ))
406418 if out_name is None :
407419 out_name = sym .list_output_names ()[0 ].replace ('_output' , '' )
408420 return out_name , sym
@@ -448,9 +460,10 @@ def _get_convolution_weights(self, layer, opname):
448460 if layer .nweights == 0 :
449461 return
450462
451- if (layer .n * layer .c * layer .size * layer .size ) != layer .nweights :
452- raise_attribute_invalid (layer .n * layer .c * layer .size * layer .size ,
453- 'layer weights size' , 'conv2d' )
463+ if layer .n * layer .c * layer .size * layer .size != layer .nweights :
464+ msg = 'nweights ({}) != n * c * h * w ({}) in operator {}'
465+ msg = msg .format (layer .nweights , layer .n * layer .c * layer .size ** 2 , opname )
466+ raise tvm .error .OpAttributeInvalid (msg )
454467
455468 shape = (layer .n , layer .c , layer .size , layer .size )
456469 weights = self ._read_memory_buffer (shape , layer .weights )
@@ -630,7 +643,8 @@ def _get_darknet_attrs(self, layer, layer_num):
630643 pass
631644
632645 else :
633- raise_operator_unimplemented (layer .type )
646+ raise tvm .error .OpNotImplemented (
647+ 'Operator {} is not supported in frontend Darknet.' .format (layer .type ))
634648
635649 return attr
636650
@@ -763,7 +777,8 @@ def _handle_darknet_rnn_layers(self, layer_num, sym):
763777
764778 elif LAYERTYPE .LSTM == layer .type :
765779 if layer .steps > 1 :
766- raise_attribute_invalid (layer .steps , 'number of steps' , 'RNN' )
780+ raise tvm .error .OpAttributeInvalid (
781+ 'Number of steps {} of RNN is not valid.' .format (layer .steps ))
767782
768783 op_name_add = 'elemwise_add'
769784 op_name_mul = 'elemwise_mul'
@@ -829,7 +844,8 @@ def _handle_darknet_rnn_layers(self, layer_num, sym):
829844
830845 elif LAYERTYPE .GRU == layer .type :
831846 if layer .steps > 1 :
832- raise_attribute_invalid (layer .steps , 'number of steps' , 'RNN' )
847+ raise tvm .error .OpAttributeInvalid (
848+ 'Number of steps {} is not valid in RNN.' .format (layer .steps ))
833849
834850 op_name_add = 'elemwise_add'
835851 op_name_mul = 'elemwise_mul'
0 commit comments