MobileNet/ImageNet inference¶
This CNN2SNN tutorial demonstrates how to convert a pre-trained MobileNet model into an Akida model.
Since the ImageNet images are not publicly available, performance is assessed using a set of 10 copyright-free images found on Google using ImageNet class names.
1. Dataset preparation¶
The test images all have at least 256 pixels in their smallest dimension and must be preprocessed to fit the model input. The imagenet.preprocessing.resize_and_crop function decodes an input image, resizes and crops it, and extracts a square 224x224x3 patch.
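For illustration only, here is a minimal sketch of what this preprocessing amounts to, written with raw TensorFlow image ops. This is an assumption-based approximation, not the actual resize_and_crop implementation, and the helper name resize_and_crop_sketch is hypothetical.
import tensorflow as tf

def resize_and_crop_sketch(image_buffer, output_size=224):
    # Decode the JPEG buffer to an RGB tensor
    image = tf.io.decode_jpeg(image_buffer, channels=3)
    # Resize so the smallest dimension becomes 256, preserving aspect ratio
    shape = tf.cast(tf.shape(image)[:2], tf.float32)
    scale = 256.0 / tf.reduce_min(shape)
    new_size = tf.cast(tf.round(shape * scale), tf.int32)
    image = tf.image.resize(image, new_size)
    # Extract the central square patch of the requested size
    offset_h = (new_size[0] - output_size) // 2
    offset_w = (new_size[1] - output_size) // 2
    return tf.image.crop_to_bounding_box(image, offset_h, offset_w,
                                         output_size, output_size)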
Note
The input size is set here to 224x224x3 because that is what the model presented in the next section expects.
import os
import numpy as np
from tensorflow.io import read_file
from tensorflow.keras.utils import get_file
from akida_models.imagenet import preprocessing
# Model specification and hyperparameters
NUM_CHANNELS = 3
IMAGE_SIZE = 224
NUM_CLASSES = 1000
num_images = 10
# Retrieve dataset file from Brainchip data server
file_path = get_file(
    "imagenet_like.zip",
    "http://data.brainchip.com/dataset-mirror/imagenet_like/imagenet_like.zip",
    cache_subdir='datasets/imagenet_like',
    extract=True)
data_folder = os.path.dirname(file_path)
# Load images for test set
x_test_files = []
x_test = np.zeros((num_images, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS),
                  dtype='uint8')
for id in range(num_images):
    test_file = 'image_' + str(id + 1).zfill(2) + '.jpg'
    x_test_files.append(test_file)
    img_path = os.path.join(data_folder, test_file)
    base_image = read_file(img_path)
    image = preprocessing.resize_and_crop(image_buffer=base_image,
                                          output_width=IMAGE_SIZE,
                                          output_height=IMAGE_SIZE,
                                          num_channels=NUM_CHANNELS)
    x_test[id, :, :, :] = np.expand_dims(image, axis=0)
# Rescale images for Keras model (normalization between -1 and 1)
# Assume rescaling format of (x - b)/a
a = 128
b = 128
input_scaling = (a, b)
x_test_preprocess = (x_test.astype('float32') - b) / a
print(f'{num_images} images loaded and preprocessed.')
Out:
Downloading data from http://data.brainchip.com/dataset-mirror/imagenet_like/imagenet_like.zip
20422656/20418307 [==============================] - 6s 0us/step
10 images loaded and preprocessed.
Note
The Akida Execution Engine is configured to take 8-bit inputs without rescaling. The rescaling values used to train the Keras model are therefore needed at conversion time.
Labels for the test images are stored in the akida_models package. The matching between class names (strings) and labels (integers) is given by the imagenet.preprocessing.index_to_label method.
import csv
# Parse labels file
fname = os.path.join(data_folder, 'labels_validation.txt')
validation_labels = dict()
with open(fname, newline='') as csvfile:
    reader = csv.reader(csvfile, delimiter=' ')
    for row in reader:
        validation_labels[row[0]] = row[1]
# Get labels for the test set by index
labels_test = np.zeros(num_images)
for i in range(num_images):
labels_test[i] = int(validation_labels[x_test_files[i]])
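As a quick sanity check, a label can be mapped back to its class name. A hedged usage example (the exact string returned depends on the akida_models label file):
# Map the integer label of the first test image back to a class name
# (illustrative; exact output depends on the akida_models label file)
print(preprocessing.index_to_label(int(labels_test[0])))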
2. Create a Keras MobileNet model¶
The MobileNet architecture is available in the Akida model zoo as mobilenet_imagenet.
from tensorflow.keras.models import load_model
# Retrieve the float model with pretrained weights and load it
model_file = get_file(
    "mobilenet_imagenet.h5",
    "http://data.brainchip.com/models/mobilenet/mobilenet_imagenet.h5",
    cache_subdir='models/mobilenet_imagenet')
model_keras = load_model(model_file)
model_keras.summary()
Out:
Downloading data from http://data.brainchip.com/models/mobilenet/mobilenet_imagenet.h5
17129472/17121320 [==============================] - 5s 0us/step
Model: "mobilenet_1.00_224"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_27 (InputLayer) [(None, 224, 224, 3)] 0
_________________________________________________________________
conv_0 (Conv2D) (None, 112, 112, 32) 864
_________________________________________________________________
conv_0_BN (BatchNormalizatio (None, 112, 112, 32) 128
_________________________________________________________________
conv_0_relu (ReLU) (None, 112, 112, 32) 0
_________________________________________________________________
separable_1 (SeparableConv2D (None, 112, 112, 64) 2336
_________________________________________________________________
separable_1_BN (BatchNormali (None, 112, 112, 64) 256
_________________________________________________________________
separable_1_relu (ReLU) (None, 112, 112, 64) 0
_________________________________________________________________
separable_2 (SeparableConv2D (None, 112, 112, 128) 8768
_________________________________________________________________
separable_2_maxpool (MaxPool (None, 56, 56, 128) 0
_________________________________________________________________
separable_2_BN (BatchNormali (None, 56, 56, 128) 512
_________________________________________________________________
separable_2_relu (ReLU) (None, 56, 56, 128) 0
_________________________________________________________________
separable_3 (SeparableConv2D (None, 56, 56, 128) 17536
_________________________________________________________________
separable_3_BN (BatchNormali (None, 56, 56, 128) 512
_________________________________________________________________
separable_3_relu (ReLU) (None, 56, 56, 128) 0
_________________________________________________________________
separable_4 (SeparableConv2D (None, 56, 56, 256) 33920
_________________________________________________________________
separable_4_maxpool (MaxPool (None, 28, 28, 256) 0
_________________________________________________________________
separable_4_BN (BatchNormali (None, 28, 28, 256) 1024
_________________________________________________________________
separable_4_relu (ReLU) (None, 28, 28, 256) 0
_________________________________________________________________
separable_5 (SeparableConv2D (None, 28, 28, 256) 67840
_________________________________________________________________
separable_5_BN (BatchNormali (None, 28, 28, 256) 1024
_________________________________________________________________
separable_5_relu (ReLU) (None, 28, 28, 256) 0
_________________________________________________________________
separable_6 (SeparableConv2D (None, 28, 28, 512) 133376
_________________________________________________________________
separable_6_maxpool (MaxPool (None, 14, 14, 512) 0
_________________________________________________________________
separable_6_BN (BatchNormali (None, 14, 14, 512) 2048
_________________________________________________________________
separable_6_relu (ReLU) (None, 14, 14, 512) 0
_________________________________________________________________
separable_7 (SeparableConv2D (None, 14, 14, 512) 266752
_________________________________________________________________
separable_7_BN (BatchNormali (None, 14, 14, 512) 2048
_________________________________________________________________
separable_7_relu (ReLU) (None, 14, 14, 512) 0
_________________________________________________________________
separable_8 (SeparableConv2D (None, 14, 14, 512) 266752
_________________________________________________________________
separable_8_BN (BatchNormali (None, 14, 14, 512) 2048
_________________________________________________________________
separable_8_relu (ReLU) (None, 14, 14, 512) 0
_________________________________________________________________
separable_9 (SeparableConv2D (None, 14, 14, 512) 266752
_________________________________________________________________
separable_9_BN (BatchNormali (None, 14, 14, 512) 2048
_________________________________________________________________
separable_9_relu (ReLU) (None, 14, 14, 512) 0
_________________________________________________________________
separable_10 (SeparableConv2 (None, 14, 14, 512) 266752
_________________________________________________________________
separable_10_BN (BatchNormal (None, 14, 14, 512) 2048
_________________________________________________________________
separable_10_relu (ReLU) (None, 14, 14, 512) 0
_________________________________________________________________
separable_11 (SeparableConv2 (None, 14, 14, 512) 266752
_________________________________________________________________
separable_11_BN (BatchNormal (None, 14, 14, 512) 2048
_________________________________________________________________
separable_11_relu (ReLU) (None, 14, 14, 512) 0
_________________________________________________________________
separable_12 (SeparableConv2 (None, 14, 14, 1024) 528896
_________________________________________________________________
separable_12_maxpool (MaxPoo (None, 7, 7, 1024) 0
_________________________________________________________________
separable_12_BN (BatchNormal (None, 7, 7, 1024) 4096
_________________________________________________________________
separable_12_relu (ReLU) (None, 7, 7, 1024) 0
_________________________________________________________________
separable_13 (SeparableConv2 (None, 7, 7, 1024) 1057792
_________________________________________________________________
separable_13_global_avg (Glo (None, 1024) 0
_________________________________________________________________
separable_13_BN (BatchNormal (None, 1024) 4096
_________________________________________________________________
separable_13_relu (ReLU) (None, 1024) 0
_________________________________________________________________
reshape_1 (Reshape) (None, 1, 1, 1024) 0
_________________________________________________________________
dropout (Dropout) (None, 1, 1, 1024) 0
_________________________________________________________________
separable_14 (SeparableConv2 (None, 1, 1, 1000) 1033216
_________________________________________________________________
act_softmax (Activation) (None, 1, 1, 1000) 0
_________________________________________________________________
reshape_2 (Reshape) (None, 1000) 0
=================================================================
Total params: 4,242,240
Trainable params: 4,230,272
Non-trainable params: 11,968
_________________________________________________________________
Top-1 accuracy on the full ImageNet dataset is 71.93%; the performance given below is measured on the 10-image subset.
from timeit import default_timer as timer
# Check model performance
def check_model_performances(model,
                             x_test=x_test_preprocess,
                             labels_test=labels_test):
    num_images = len(x_test)

    start = timer()
    potentials_keras = model.predict(x_test, batch_size=100)
    end = timer()
    print(f'Keras inference on {num_images} images took {end-start:.2f} s.\n')

    preds_keras = np.squeeze(np.argmax(potentials_keras, 1))
    accuracy_keras = np.sum(np.equal(preds_keras, labels_test)) / num_images
    print(f"Keras accuracy: {accuracy_keras*100:.2f} %")
check_model_performances(model_keras)
Out:
Keras inference on 10 images took 0.46 s.
Keras accuracy: 100.00 %
3. Quantized model¶
Quantizing a model is done using cnn2snn.quantize.
The quantized model satisfies the Akida NSoC requirements:

- the first layer has 8-bit weights,
- all other convolutional layers have 4-bit weights,
- all convolutional layers have 4-bit activations.
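These requirements map directly onto the arguments of the quantize call below. As a hedged reference, here is the same call with explicit keyword arguments (names as documented for cnn2snn; verify against your installed version):
from cnn2snn import quantize

# Same call as below, with the assumed keyword names spelled out
model_q = quantize(model_keras,
                   weight_quantization=4,
                   activ_quantization=4,
                   input_weight_quantization=8)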
However, this model suffers a drop in accuracy due to quantization, as shown in the table below for ImageNet and in the next cell for the 10-image set.
| Float accuracy | Quantized accuracy |
| -------------- | ------------------ |
| 71.93 %        | 9.52 %             |
from cnn2snn import quantize
# Quantize the model to 4-bit weights and activations, 8-bit weights for the
# first convolutional layer
model_keras_quantized = quantize(model_keras, 4, 4, 8)
# Check Model performance
check_model_performances(model_keras_quantized)
Out:
Keras inference on 10 images took 0.58 s.
Keras accuracy: 30.00 %
4. Pretrained quantized model¶
The Akida model zoo also contains a pretrained quantized model that was obtained by fine-tuning the quantized model for 30 epochs.
Tuning the model, that is, retraining it with a lowered learning rate, recovers performance close to the initial floating-point accuracy.
Performance on the full ImageNet dataset is:
| Float accuracy | Quantized accuracy | After tuning |
| -------------- | ------------------ | ------------ |
| 71.93 %        | 9.52 %             | 69.63 %      |
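A hedged sketch of how such fine-tuning could be set up, for illustration only: the full ImageNet training set is required and is not distributed with this tutorial, and the optimizer settings below are assumptions, not the exact recipe used to produce the pretrained weights.
from tensorflow.keras.optimizers import Adam

# Retrain the quantized model with a lowered learning rate (assumed value)
model_keras_quantized.compile(optimizer=Adam(learning_rate=1e-5),
                              loss='sparse_categorical_crossentropy',
                              metrics=['accuracy'])
# model_keras_quantized.fit(imagenet_train_dataset, epochs=30)  # needs ImageNet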
from akida_models import mobilenet_imagenet_pretrained
# Use a quantized model with pretrained quantized weights
model_keras_quantized_pretrained = mobilenet_imagenet_pretrained()
model_keras_quantized_pretrained.summary()
Out:
Downloading data from http://data.brainchip.com/models/mobilenet/mobilenet_imagenet_iq8_wq4_aq4.h5
33988608/33988120 [==============================] - 10s 0us/step
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_3 (InputLayer) [(None, 224, 224, 3)] 0
_________________________________________________________________
conv_0 (QuantizedConv2D) (None, 112, 112, 32) 896
_________________________________________________________________
activation_discrete_relu (Ac (None, 112, 112, 32) 0
_________________________________________________________________
separable_1 (QuantizedSepara (None, 112, 112, 64) 2400
_________________________________________________________________
activation_discrete_relu_1 ( (None, 112, 112, 64) 0
_________________________________________________________________
separable_2 (QuantizedSepara (None, 112, 112, 128) 8896
_________________________________________________________________
separable_2_maxpool (MaxPool (None, 56, 56, 128) 0
_________________________________________________________________
activation_discrete_relu_2 ( (None, 56, 56, 128) 0
_________________________________________________________________
separable_3 (QuantizedSepara (None, 56, 56, 128) 17664
_________________________________________________________________
activation_discrete_relu_3 ( (None, 56, 56, 128) 0
_________________________________________________________________
separable_4 (QuantizedSepara (None, 56, 56, 256) 34176
_________________________________________________________________
separable_4_maxpool (MaxPool (None, 28, 28, 256) 0
_________________________________________________________________
activation_discrete_relu_4 ( (None, 28, 28, 256) 0
_________________________________________________________________
separable_5 (QuantizedSepara (None, 28, 28, 256) 68096
_________________________________________________________________
activation_discrete_relu_5 ( (None, 28, 28, 256) 0
_________________________________________________________________
separable_6 (QuantizedSepara (None, 28, 28, 512) 133888
_________________________________________________________________
separable_6_maxpool (MaxPool (None, 14, 14, 512) 0
_________________________________________________________________
activation_discrete_relu_6 ( (None, 14, 14, 512) 0
_________________________________________________________________
separable_7 (QuantizedSepara (None, 14, 14, 512) 267264
_________________________________________________________________
activation_discrete_relu_7 ( (None, 14, 14, 512) 0
_________________________________________________________________
separable_8 (QuantizedSepara (None, 14, 14, 512) 267264
_________________________________________________________________
activation_discrete_relu_8 ( (None, 14, 14, 512) 0
_________________________________________________________________
separable_9 (QuantizedSepara (None, 14, 14, 512) 267264
_________________________________________________________________
activation_discrete_relu_9 ( (None, 14, 14, 512) 0
_________________________________________________________________
separable_10 (QuantizedSepar (None, 14, 14, 512) 267264
_________________________________________________________________
activation_discrete_relu_10 (None, 14, 14, 512) 0
_________________________________________________________________
separable_11 (QuantizedSepar (None, 14, 14, 512) 267264
_________________________________________________________________
activation_discrete_relu_11 (None, 14, 14, 512) 0
_________________________________________________________________
separable_12 (QuantizedSepar (None, 14, 14, 1024) 529920
_________________________________________________________________
separable_12_maxpool (MaxPoo (None, 7, 7, 1024) 0
_________________________________________________________________
activation_discrete_relu_12 (None, 7, 7, 1024) 0
_________________________________________________________________
separable_13 (QuantizedSepar (None, 7, 7, 1024) 1058816
_________________________________________________________________
separable_13_global_avg (Glo (None, 1024) 0
_________________________________________________________________
activation_discrete_relu_13 (None, 1024) 0
_________________________________________________________________
reshape_1 (Reshape) (None, 1, 1, 1024) 0
_________________________________________________________________
dropout (Dropout) (None, 1, 1, 1024) 0
_________________________________________________________________
separable_14 (QuantizedSepar (None, 1, 1, 1000) 1033216
_________________________________________________________________
act_softmax (Activation) (None, 1, 1, 1000) 0
_________________________________________________________________
reshape_2 (Reshape) (None, 1000) 0
=================================================================
Total params: 4,224,288
Trainable params: 4,224,288
Non-trainable params: 0
_________________________________________________________________
# Check model performance
check_model_performances(model_keras_quantized_pretrained)
Out:
Keras inference on 10 images took 0.58 s.
Keras accuracy: 90.00 %
5. Conversion to Akida¶
5.1 Convert to Akida model¶
Here, the quantized Keras model is converted into a version suitable for the Akida NSoC. The cnn2snn.convert function takes the Keras model and the input scaling parameters as arguments.
from cnn2snn import convert
model_akida = convert(model_keras_quantized_pretrained,
                      input_scaling=input_scaling)
Out:
Warning: the activation layer 'act_softmax' will be discarded at conversion. The outputs of the Akida model will be the potentials before this activation layer.
5.2 Check hardware compliance¶
The Model.summary method provides a detailed description of the Model layers.
It also indicates hardware incompatibilities, if there are any. Hardware compatibility can also be checked programmatically using model_hardware_incompatibilities.
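A hedged sketch of such a programmatic check; the import path and signature below are assumptions based on the API referenced above, so verify them against your installed akida package:
# Assumed import path and signature - check your akida documentation
from akida.compatibility import model_hardware_incompatibilities

incompatibilities = model_hardware_incompatibilities(model_akida)
print(incompatibilities or "No hardware incompatibilities found.")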
model_akida.summary()
Out:
Model Summary
____________________________________________________________________________________________
Layer (type) Output shape Kernel shape
============================================================================================
conv_0 (InputConvolutional) [112, 112, 32] (3, 3, 3, 32)
____________________________________________________________________________________________
separable_1 (SeparableConvolutional) [112, 112, 64] (3, 3, 32, 1), (1, 1, 32, 64)
____________________________________________________________________________________________
separable_2 (SeparableConvolutional) [56, 56, 128] (3, 3, 64, 1), (1, 1, 64, 128)
____________________________________________________________________________________________
separable_3 (SeparableConvolutional) [56, 56, 128] (3, 3, 128, 1), (1, 1, 128, 128)
____________________________________________________________________________________________
separable_4 (SeparableConvolutional) [28, 28, 256] (3, 3, 128, 1), (1, 1, 128, 256)
____________________________________________________________________________________________
separable_5 (SeparableConvolutional) [28, 28, 256] (3, 3, 256, 1), (1, 1, 256, 256)
____________________________________________________________________________________________
separable_6 (SeparableConvolutional) [14, 14, 512] (3, 3, 256, 1), (1, 1, 256, 512)
____________________________________________________________________________________________
separable_7 (SeparableConvolutional) [14, 14, 512] (3, 3, 512, 1), (1, 1, 512, 512)
____________________________________________________________________________________________
separable_8 (SeparableConvolutional) [14, 14, 512] (3, 3, 512, 1), (1, 1, 512, 512)
____________________________________________________________________________________________
separable_9 (SeparableConvolutional) [14, 14, 512] (3, 3, 512, 1), (1, 1, 512, 512)
____________________________________________________________________________________________
separable_10 (SeparableConvolutional) [14, 14, 512] (3, 3, 512, 1), (1, 1, 512, 512)
____________________________________________________________________________________________
separable_11 (SeparableConvolutional) [14, 14, 512] (3, 3, 512, 1), (1, 1, 512, 512)
____________________________________________________________________________________________
separable_12 (SeparableConvolutional) [7, 7, 1024] (3, 3, 512, 1), (1, 1, 512, 1024)
____________________________________________________________________________________________
separable_13 (SeparableConvolutional) [1, 1, 1024] (3, 3, 1024, 1), (1, 1, 1024, 1024)
____________________________________________________________________________________________
separable_14 (SeparableConvolutional) [1, 1, 1000] (3, 3, 1024, 1), (1, 1, 1024, 1000)
____________________________________________________________________________________________
Input shape: 224, 224, 3
Backend type: Software - 1.8.10
5.3 Check performance¶
While accuracy on the 10-image set is computed in the next cell, the following table summarizes results obtained on the full ImageNet dataset.
| Keras accuracy | Akida accuracy |
| -------------- | -------------- |
| 69.63 %        | 69.18 %        |
# Check Model performance
start = timer()
preds_akida = model_akida.predict(x_test)
end = timer()
print(f'Inference on {num_images} images took {end-start:.2f} s.\n')
accuracy_akida = np.sum(np.equal(preds_akida, labels_test)) / num_images
print(f"Accuracy: {accuracy_akida*100:.2f} %")
# For non-regression purposes
assert accuracy_akida >= 0.9
Out:
Inference on 10 images took 1.04 s.
Accuracy: 90.00 %
# Print model statistics
print("Model statistics")
stats = model_akida.get_statistics()
model_akida.predict(x_test[:20])
for _, stat in stats.items():
    print(stat)
Out:
Model statistics
Layer (type)                   output sparsity
conv_0 (InputConvolutional)    0.39

Layer (type)                   input sparsity   output sparsity   ops
separable_1 (SeparableConvolu  0.39             0.41              142915793
separable_2 (SeparableConvolu  0.41             0.35              549372551
separable_3 (SeparableConvolu  0.35             0.39              302190301
separable_4 (SeparableConvolu  0.39             0.57              563392449
separable_5 (SeparableConvolu  0.57             0.39              200000024
separable_6 (SeparableConvolu  0.39             0.63              566831393
separable_7 (SeparableConvolu  0.63             0.61              171272692
separable_8 (SeparableConvolu  0.61             0.69              181659101
separable_9 (SeparableConvolu  0.69             0.75              143253039
separable_10 (SeparableConvol  0.75             0.73              115399609
separable_11 (SeparableConvol  0.73             0.72              122782189
separable_12 (SeparableConvol  0.72             0.91              260061973
separable_13 (SeparableConvol  0.91             0.64              42696992
separable_14 (SeparableConvol  0.64             0.00              3316213
5.4 Show predictions for a random image¶
import matplotlib.pyplot as plt
import matplotlib.lines as lines
# Functions used to display the top5 results
def get_top5(potentials, true_label):
    """
    Returns the top 5 classes from the output potentials
    """
    tmp_pots = potentials.copy()
    top5 = []
    min_val = np.min(tmp_pots)
    for ii in range(5):
        best = np.argmax(tmp_pots)
        top5.append(best)
        tmp_pots[best] = min_val

    vals = np.zeros((6,))
    vals[:5] = potentials[top5]
    if true_label not in top5:
        vals[5] = potentials[true_label]
    else:
        vals[5] = 0
    vals /= np.max(vals)

    class_name = []
    for ii in range(5):
        class_name.append(preprocessing.index_to_label(top5[ii]).split(',')[0])
    if true_label in top5:
        class_name.append('')
    else:
        class_name.append(
            preprocessing.index_to_label(true_label).split(',')[0])

    return top5, vals, class_name
def adjust_spines(ax, spines):
    for loc, spine in ax.spines.items():
        if loc in spines:
            spine.set_position(('outward', 10))  # outward by 10 points
        else:
            spine.set_color('none')  # don't draw spine

    # turn off ticks where there is no spine
    if 'left' in spines:
        ax.yaxis.set_ticks_position('left')
    else:
        # no yaxis ticks
        ax.yaxis.set_ticks([])

    if 'bottom' in spines:
        ax.xaxis.set_ticks_position('bottom')
    else:
        # no xaxis ticks
        ax.xaxis.set_ticks([])
def prepare_plots():
    fig = plt.figure(figsize=(8, 4))
    # Image subplot
    ax0 = plt.subplot(1, 3, 1)
    imgobj = ax0.imshow(np.zeros((224, 224, 3), dtype=np.uint8))
    ax0.set_axis_off()
    # Top 5 results subplot
    ax1 = plt.subplot(1, 2, 2)
    bar_positions = (0, 1, 2, 3, 4, 6)
    rects = ax1.barh(bar_positions, np.zeros((6,)), align='center', height=0.5)
    plt.xlim(-0.2, 1.01)
    ax1.set(xlim=(-0.2, 1.15), ylim=(-1.5, 12))
    ax1.set_yticks(bar_positions)
    ax1.invert_yaxis()
    ax1.yaxis.set_ticks_position('left')
    ax1.xaxis.set_ticks([])
    adjust_spines(ax1, 'left')
    ax1.add_line(lines.Line2D((0, 0), (-0.5, 6.5), color=(0.0, 0.0, 0.0)))
    txt_axlbl = ax1.text(-1, -1, 'Top 5 Predictions:', size=12)
    # Adjust Plot Positions
    ax0.set_position([0.05, 0.055, 0.3, 0.9])
    l1, b1, w1, h1 = ax1.get_position().bounds
    ax1.set_position([l1 * 1.05, b1 + 0.09 * h1, w1, 0.8 * h1])
    # Add title box
    plt.figtext(0.5,
                0.9,
                "Imagenet Classification by Akida",
                size=20,
                ha="center",
                va="center",
                bbox=dict(boxstyle="round",
                          ec=(0.5, 0.5, 0.5),
                          fc=(0.9, 0.9, 1.0)))
    return fig, imgobj, ax1, rects
def update_bars_chart(rects, vals, true_label):
    # `top5` is read from the enclosing scope (set before this is called)
    counter = 0
    for rect, h in zip(rects, vals):
        rect.set_width(h)
        if counter < 5:
            if top5[counter] == true_label:
                if counter == 0:
                    rect.set_facecolor((0.0, 1.0, 0.0))
                else:
                    rect.set_facecolor((0.0, 0.5, 0.0))
            else:
                rect.set_facecolor('gray')
        elif counter == 5:
            rect.set_facecolor('red')
        counter += 1
# Prepare plots
fig, imgobj, ax1, rects = prepare_plots()
# Get a random image
img = np.random.randint(num_images)
# Predict image class
potentials_akida = model_akida.evaluate(np.expand_dims(x_test[img],
                                                       axis=0)).squeeze()
# Get top 5 prediction labels and associated names
true_label = int(validation_labels[x_test_files[img]])
top5, yvals, class_name = get_top5(potentials_akida, true_label)
# Draw Plots
imgobj.set_data(x_test[img])
ax1.set_yticklabels(class_name, rotation='horizontal', size=9)
update_bars_chart(rects, yvals, true_label)
fig.canvas.draw()
plt.show()
[Figure: "Imagenet Classification by Akida" showing the test image alongside a bar chart of its top 5 predictions]
Total running time of the script: ( 0 minutes 30.210 seconds)