"""Hybrid quantization, step 1, for an SSD-MobileNet-V2 TensorFlow model
using the Rockchip RKNN toolkit.

Loads the frozen ``ssd_mobilenet_v2.pb`` graph, runs
``hybrid_quantization_step1`` against the calibration dataset to produce
the editable ``*.quantization.cfg`` (plus ``*.data``/``*.model``) files,
and prints instructions for editing the generated cfg before step 2.
"""
import numpy as np
import cv2
from rknn.api import RKNN

if __name__ == '__main__':

    # Create RKNN object
    rknn = RKNN(verbose=True)

    # Pre-process config: inputs are normalized to roughly [-1, 1]
    # via (pixel - 127.5) / 127.5.
    print('--> Config model')
    rknn.config(mean_values=[127.5, 127.5, 127.5], std_values=[127.5, 127.5, 127.5])
    print('done')

    # Load the frozen TensorFlow graph (NHWC input, 1x300x300x3).
    print('--> Loading model')
    ret = rknn.load_tensorflow(tf_pb='./ssd_mobilenet_v2.pb',
                               inputs=['FeatureExtractor/MobilenetV2/MobilenetV2/input'],
                               outputs=['concat_1', 'concat'],
                               input_size_list=[[1, 300, 300, 3]])
    if ret != 0:
        print('Load model failed!')
        exit(ret)
    print('done')

    # Hybrid quantization step 1: analyzes the model on the calibration
    # dataset and emits the editable *.quantization.cfg file.
    print('--> hybrid_quantization_step1')
    ret = rknn.hybrid_quantization_step1(dataset='./dataset.txt', proposal=False)
    if ret != 0:
        print('hybrid_quantization_step1 failed!')
        exit(ret)
    print('done')

    # Tips: tell the user how to edit the generated cfg before running
    # hybrid_quantization_step2.
    # FIX: the cfg key is 'custom_quantize_layers' (as both examples below
    # show); the original message said 'customized_quantize_layers', which
    # does not match the examples or the generated cfg file.
    print('Please modify ssd_mobilenet_v2.quantization.cfg!')
    print('==================================================================================================')
    print('Modify Method: Fill the custom_quantize_layers with the output name & dtype of the custom layer.')
    print('')
    print('For example:')
    print(' custom_quantize_layers:')
    print('   FeatureExtractor/MobilenetV2/expanded_conv/depthwise/BatchNorm/batchnorm/add_1:0: float16')
    print('   FeatureExtractor/MobilenetV2/expanded_conv/depthwise/Relu6:0: float16')
    print('Or:')
    print(' custom_quantize_layers: {')
    print('   FeatureExtractor/MobilenetV2/expanded_conv/depthwise/BatchNorm/batchnorm/add_1:0: float16,')
    print('   FeatureExtractor/MobilenetV2/expanded_conv/depthwise/Relu6:0: float16,')
    print(' }')
    print('==================================================================================================')

    rknn.release()