Lines matching full:if (source line number, then the matching statement). Illustrative sketches of the main checks follow the listing.
97 if int16xint8_int32:
98 if not self.is_int16xint8:
100 if not self.test_type == 'conv':
104 if self.test_type == 'depthwise_conv':
106 if self.output_ch % self.input_ch != 0:
108 if groups != 1:
114 if in_ch % groups != 0:
116 if out_ch % groups != 0:
119 if self.int4_weights:
120 if self.test_type == 'conv':
134 if self.test_type == 'depthwise_conv':
140 if self.groups != 1:
142 if self.test_type == 'transpose_conv':
151 if len(self.scaling_factors) != num_channels:
178 if data is not None:
179 if tf_tensor:
184 if quantization_type.lower() == "affine":
201 scale = 0.1 if scale == 0 else scale
206 if self.is_int16xint8:
209 bias_datatype = "int32_t" if self.int16xint8_int32 else "int64_t"
218 if self.test_type == 'conv' or self.test_type == 'transpose_conv':
223 if self.int4_weights:
226 if weights is not None:
239 if w_shape[0] % 2:
242 if self.test_type == 'depthwise_conv':
245 if self.generate_bias:
262 if self.has_padding:
305 if self.test_type == 'depthwise_conv':
318 if self.test_type == 'transpose_conv':
323 if weights is not None:
337 if self.test_type == 'conv':
347 if self.generate_bias:
360 if self.generate_bias:
374 if self.generate_bias:
379 if self.test_type == 'transpose_conv' and self.generate_bias:
396 if not self.int4_weights and not self.generate_bias:
402 if self.int4_weights:
407 if weights.numpy().size != expected_weight_size or \
416 if self.test_type == 'transpose_conv':
430 if self.generate_bias:
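
The cluster of checks at lines 104 through 116 constrains channel counts for depthwise and grouped convolution. Below is a minimal sketch of what those divisibility checks appear to enforce; the function name, error messages, and the choice to raise RuntimeError are assumptions for illustration, not the script's actual behavior.

    def validate_conv_shapes(test_type: str, in_ch: int, out_ch: int, groups: int) -> None:
        # Hypothetical validation mirroring the divisibility checks listed above.
        if test_type == 'depthwise_conv':
            # Depthwise conv: each input channel produces a whole number of
            # output channels (the channel multiplier).
            if out_ch % in_ch != 0:
                raise RuntimeError("out_ch must be a multiple of in_ch for depthwise_conv")
            if groups != 1:
                raise RuntimeError("depthwise_conv does not take an explicit groups parameter")
        else:
            # Grouped conv: both channel counts must split evenly across groups.
            if in_ch % groups != 0:
                raise RuntimeError("in_ch must be divisible by groups")
            if out_ch % groups != 0:
                raise RuntimeError("out_ch must be divisible by groups")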
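Lines 184 through 209 cover quantization setup: an affine quantization branch, a guard that replaces a zero scale with 0.1 (line 201), and a bias data type of int32_t or int64_t for the int16x8 variant (line 209). The snippet below is a minimal sketch of per-channel scale derivation with that zero-scale guard, assuming symmetric 8-bit weight quantization with the output channel as the last axis; the function name and exact formula are illustrative assumptions, not the script's quantization routine.

    import numpy as np

    def per_channel_scales(weights: np.ndarray) -> list:
        """Derive one scale per output channel (assumed to be the last axis)."""
        scales = []
        for channel in np.moveaxis(weights, -1, 0):
            # Symmetric 8-bit quantization: map the largest magnitude to 127.
            scale = float(np.max(np.abs(channel))) / 127
            # Guard against an all-zero channel (mirrors line 201 above).
            scale = 0.1 if scale == 0 else scale
            scales.append(scale)
        return scales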
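Lines 223 through 242 deal with int4 weights, including a check on whether the leading weight dimension is odd (line 239), which matters when two signed 4-bit values are packed into each byte. Below is an illustrative sketch of such packing; the function name and nibble order (low nibble first) are assumptions, not taken from the script.

    import numpy as np

    def pack_int4(values: np.ndarray) -> np.ndarray:
        """Pack signed 4-bit values two per byte, padding with a zero nibble if the count is odd."""
        flat = values.astype(np.int8).flatten()
        if flat.size % 2:
            # Odd number of weights: append a zero so every byte holds two nibbles.
            flat = np.append(flat, np.int8(0))
        nibbles = (flat & 0x0F).astype(np.uint8)
        return nibbles[0::2] | (nibbles[1::2] << 4)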