rnn_dump.py

#!/usr/bin/python

from __future__ import print_function

from keras.models import Sequential
from keras.models import Model
from keras.layers import Input
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import GRU
from keras.models import load_model
from keras import backend as K

import sys
import numpy as np

# Write a weight vector to f as a C array of opus_int8: each float weight is
# quantized to round(128*w) and clamped to [-128, 127], eight values per line.
def printVector(f, vector, name):
    v = np.reshape(vector, (-1))
    #print('static const float ', name, '[', len(v), '] = \n', file=f)
    f.write('static const opus_int8 {}[{}] = {{\n '.format(name, len(v)))
    for i in range(0, len(v)):
        f.write('{}'.format(max(-128, min(127, int(round(128*v[i]))))))
        if i != len(v)-1:
            f.write(',')
        else:
            break
        if i % 8 == 7:
            f.write("\n ")
        else:
            f.write(" ")
    #print(v, file=f)
    f.write('\n};\n\n')
    return
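
# Illustrative only: a hypothetical call printVector(f, np.array([0.5, -1.2]), 'demo')
# would emit the following C (0.5 -> 64, and -1.2 -> -154, clamped to -128):
#   static const opus_int8 demo[2] = {
#    64, -128
#   };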

# Custom loss; only needed by the commented-out load_model() path below,
# which must be able to deserialize a model trained with it.
def binary_crossentrop2(y_true, y_pred):
    return K.mean(2*K.abs(y_true-0.5) * K.binary_crossentropy(y_pred, y_true), axis=-1)

#model = load_model(sys.argv[1], custom_objects={'binary_crossentrop2': binary_crossentrop2})

# Rebuild the network architecture and load the trained weights from the
# file given as the first command-line argument.
main_input = Input(shape=(None, 25), name='main_input')
x = Dense(32, activation='tanh')(main_input)
x = GRU(24, activation='tanh', recurrent_activation='sigmoid', return_sequences=True)(x)
x = Dense(2, activation='sigmoid')(x)
model = Model(inputs=main_input, outputs=x)
model.load_weights(sys.argv[1])

weights = model.get_weights()

# Emit the weights as C source to the file given as the second argument.
f = open(sys.argv[2], 'w')

f.write('/*This file is automatically generated from a Keras model*/\n\n')
f.write('#ifdef HAVE_CONFIG_H\n#include "config.h"\n#endif\n\n#include "mlp.h"\n\n')

# get_weights() returns, in order: the first Dense kernel and bias, the GRU
# kernel, recurrent kernel and bias, then the output Dense kernel and bias.
printVector(f, weights[0], 'layer0_weights')
printVector(f, weights[1], 'layer0_bias')
printVector(f, weights[2], 'layer1_weights')
printVector(f, weights[3], 'layer1_recur_weights')
printVector(f, weights[4], 'layer1_bias')
printVector(f, weights[5], 'layer2_weights')
printVector(f, weights[6], 'layer2_bias')

f.write('const DenseLayer layer0 = {\n layer0_bias,\n layer0_weights,\n 25, 32, 0\n};\n\n')
f.write('const GRULayer layer1 = {\n layer1_bias,\n layer1_weights,\n layer1_recur_weights,\n 32, 24\n};\n\n')
f.write('const DenseLayer layer2 = {\n layer2_bias,\n layer2_weights,\n 24, 2, 1\n};\n\n')

f.close()
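
# Example usage (file names are only placeholders):
#   python rnn_dump.py weights.h5 rnn_weights.c
# The first argument is the Keras weights file passed to model.load_weights(),
# the second is the generated C source, which relies on the DenseLayer and
# GRULayer types declared in "mlp.h".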