Enable quantised model for TF Bonsai using quantizeBonsaiModels
Aditya Kusupati committed Dec 28, 2018
1 parent 1b9350b commit bece1c1
Showing 3 changed files with 101 additions and 0 deletions.
10 changes: 10 additions & 0 deletions tf/examples/Bonsai/README.md
@@ -46,6 +46,16 @@ Non-Zeros: 4156.0 Model Size: 31.703125 KB hasSparse: True

The usps10 directory will now contain a consolidated results file, `TFBonsaiResults.txt`, and a `TFBonsaiResults` directory holding the corresponding model from each run of the code on the usps10 dataset.

## Byte Quantization (Q) for model compression
If you wish to quantize the generated model to byte-quantized integers, use `quantizeBonsaiModels.py`. Usage instructions:

```
python quantizeBonsaiModels.py -h
```

This will generate a quantized model in which every saved parameter file carries a `q` prefix, stored in a new directory `QuantizedTFBonsaiModel` inside the model directory.
This quantized model can then be used on edge devices.
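
For example, a run might look like the following. The model directory path here is only illustrative; point `-dir` at the timestamped model directory produced by your own training run, and `-m 127` simply restates the default 1-byte setting.

```
python quantizeBonsaiModels.py -dir usps10/TFBonsaiResults/<timestamp> -m 127
```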


Copyright (c) Microsoft Corporation. All rights reserved.

19 changes: 19 additions & 0 deletions tf/examples/Bonsai/helpermethods.py
@@ -106,6 +106,25 @@ def getArgs():
    return parser.parse_args()


def getQuantArgs():
    '''
    Function to parse arguments for Model Quantisation
    '''
    parser = argparse.ArgumentParser(
        description='Arguments for quantizing trained Bonsai models. ' +
        'Quantization works best with piece-wise linear non-linearities ' +
        'like relu, quantTanh and quantSigm')
    parser.add_argument('-dir', '--model-dir', required=True,
                        help='model directory containing ' +
                        '*.npy weight files dumped from the trained model')
    parser.add_argument('-m', '--max-val', type=checkIntNneg, default=127,
                        help='maximum possible absolute value in the ' +
                        'quantized model, essentially the byte complexity; ' +
                        '127 => 1 byte is the default')

    return parser.parse_args()


def createTimeStampDir(dataDir):
    '''
    Creates a directory with the timestamp as its name
72 changes: 72 additions & 0 deletions tf/examples/Bonsai/quantizeBonsaiModels.py
@@ -0,0 +1,72 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.

import helpermethods
import os
import numpy as np


def min_max(A, name):
    print(name + " has max: " + str(np.max(A)) + " min: " + str(np.min(A)))
    return np.max([np.abs(np.max(A)), np.abs(np.min(A))])


def quantizeFastModels(modelDir, maxValue=127, scalarScaleFactor=1000):
    ls = os.listdir(modelDir)
    paramNameList = []
    paramWeightList = []
    paramLimitList = []

    # Collect every parameter matrix, skipping preprocessing statistics
    # and hyperparameter dumps, and record its largest absolute entry.
    for file in ls:
        if file.endswith("npy"):
            if file.startswith("mean") or file.startswith("std") or file.startswith("hyperParam"):
                continue
            else:
                paramNameList.append(file)
                temp = np.load(modelDir + "/" + file)
                paramWeightList.append(temp)
                paramLimitList.append(min_max(temp, file))

    paramLimit = np.max(paramLimitList)

    # Single scale factor chosen so that the largest parameter magnitude
    # maps onto the integer range [-(maxValue + 1), maxValue].
    paramScaleFactor = np.round((2.0 * maxValue + 1.0) / (2.0 * paramLimit))

    quantParamWeights = []
    for param in paramWeightList:
        temp = np.round(paramScaleFactor * param)
        temp[temp[:] > maxValue] = maxValue
        temp[temp[:] < -maxValue] = -1 * (maxValue + 1)

        if maxValue <= 127:
            temp = temp.astype('int8')
        elif maxValue <= 32767:
            temp = temp.astype('int16')
        else:
            temp = temp.astype('int32')

        quantParamWeights.append(temp)

    quantModelDir = modelDir + '/QuantizedTFBonsaiModel'
    if not os.path.isdir(quantModelDir):
        try:
            os.mkdir(quantModelDir)
        except OSError:
            print("Creation of the directory %s failed" % quantModelDir)
            return

    np.save(quantModelDir + "/paramScaleFactor.npy",
            paramScaleFactor.astype('int32'))

    for i in range(len(paramNameList)):
        np.save(quantModelDir + "/q" + paramNameList[i], quantParamWeights[i])

    print("\n\nQuantized Model Dir: " + quantModelDir)


def main():
    args = helpermethods.getQuantArgs()
    quantizeFastModels(args.model_dir, int(args.max_val))


if __name__ == '__main__':
    main()
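
As a rough sketch of how the artifacts written by `quantizeFastModels` could be consumed, the snippet below loads `paramScaleFactor.npy` and the `q`-prefixed weight files and divides the stored integers by the scale factor to recover approximate floating-point parameters. The helper name `loadQuantizedBonsaiModel` and the divide-back step are illustrative assumptions, not part of this commit; on-device inference would typically keep the integer weights and fold the scale factor into its fixed-point arithmetic.

```
import os
import numpy as np


def loadQuantizedBonsaiModel(quantModelDir):
    # quantModelDir points at the QuantizedTFBonsaiModel directory written above.
    scale = float(np.load(os.path.join(quantModelDir, "paramScaleFactor.npy")))
    params = {}
    for f in os.listdir(quantModelDir):
        if f.startswith("q") and f.endswith(".npy"):
            q = np.load(os.path.join(quantModelDir, f))
            # Dividing the stored integers by the scale factor recovers
            # approximate floating-point weights (illustrative assumption).
            params[f[1:]] = q.astype(np.float32) / scale
    return params
```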
