Skip to content

Instantly share code, notes, and snippets.

@Koziev
Last active October 25, 2018 09:21
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save Koziev/ce781f1acd9df0bb34a7e5d89411bb1e to your computer and use it in GitHub Desktop.
Save Koziev/ce781f1acd9df0bb34a7e5d89411bb1e to your computer and use it in GitHub Desktop.
Проверка либы NNSharp: использование в .NET моделей, обученных в Python+Keras (Testing the NNSharp library: using models trained in Python+Keras from .NET)
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NNSharp.DataTypes;
namespace sample1
{
    /// <summary>
    /// Smoke test for the NNSharp library: loads a model trained in Python/Keras
    /// (exported to JSON by KerasModeltoJSON) and runs a forward pass on a
    /// deterministic all-zero input so the result can be compared against the
    /// prediction printed by the Keras-side sanity check.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            // Load the model previously exported from Keras as JSON.
            var reader = new NNSharp.IO.ReaderKerasModel(@"e:\projects\Keras2CSharp\model\simple_keras_model.json");
            NNSharp.Models.SequentialModel model = reader.GetSequentialExecutor();

            // Must match the training-time configuration on the Python side.
            int sample_len = 10;
            int nb_samples = 1;
            int batch_size = 1;

            Data2D input = new Data2D(nb_samples, sample_len, 1, batch_size);
            List<double> y_true = new List<double>();

            for (int ibatch = 0; ibatch < batch_size; ++ibatch)
            {
                for (int isample = 0; isample < nb_samples; ++isample)
                {
                    double sum = 0.0;
                    for (int ipoint = 0; ipoint < sample_len; ++ipoint)
                    {
                        // Deliberately feed an all-zero sequence so the output is
                        // directly comparable with the Keras check for dig=0.
                        // Training normalized digits 0..10 as ix / 11.0.
                        int ix = 0;
                        double x = ix / 11.0;
                        sum += x;
                        input[isample, ipoint, 0, ibatch] = x;
                    }
                    // Ground-truth target: mean of the normalized points.
                    y_true.Add(sum / sample_len);
                }
            }

            // Run the forward pass and show predicted vs expected values
            // (previously the output was computed and silently discarded).
            IData output = model.ExecuteNetwork(input);
            Console.WriteLine("y_true={0} output={1}", y_true[0], output);
        }
    }
}
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
import codecs
import itertools
import json
import os
import sys
import argparse
import random
import collections
import logging
import operator
import math
import tqdm
import numpy as np
import pandas as pd
from keras.models import Sequential
import keras.callbacks
from keras import backend as K
from keras.callbacks import ModelCheckpoint, EarlyStopping
from keras.layers import Conv1D, GlobalMaxPooling1D, GlobalAveragePooling1D, AveragePooling1D
from keras.layers import Input
from keras.layers import Lambda
from keras.layers import recurrent
from keras.layers.core import Dense
from keras.layers.merge import concatenate, add, multiply
from keras.layers.wrappers import Bidirectional
from keras.models import Model
from keras.models import model_from_json
from keras.layers.normalization import BatchNormalization
from keras.layers import Flatten
import keras.regularizers
from sklearn.model_selection import train_test_split
import sklearn.metrics
import KerasModeltoJSON as js
BATCH_SIZE = 1

# --- Generate a simple synthetic dataset -------------------------------------
# Each sample is sample_len digits drawn from 0..max_digit, normalized to
# [0, 1) by dividing by (max_digit + 1); the regression target is the mean of
# the normalized points.
nb_data = 10000
sample_len = 10
max_digit = 10

X_data = np.zeros((nb_data, sample_len), np.float32)
y_data = np.zeros(nb_data, np.float32)

for i in range(nb_data):
    total = 0.0  # renamed from `sum` to avoid shadowing the builtin
    for j in range(sample_len):
        # Sample #0 is kept all-zero so it is easy to reproduce on the C# side.
        ix = random.choice(range(max_digit + 1)) if i > 0 else 0
        x = ix / float(max_digit + 1)
        X_data[i, j] = x
        total += x
    y_data[i] = total / sample_len

# Add a trailing feature axis: the Keras LSTM expects (samples, timesteps, features).
X_data = X_data.reshape((nb_data, sample_len, 1))
# Hold out 30% of the data for validation (fixed seed for reproducibility).
X_train, X_val, y_train, y_val = train_test_split(X_data, y_data, test_size=0.3, random_state=31412956)

# Tiny LSTM regressor: 3 sigmoid recurrent units feeding one sigmoid output,
# trained with MSE — small on purpose so the NNSharp port is easy to verify.
model = Sequential()
#model.add(Input(batch_shape=(BATCH_SIZE, sample_len, 1), name='input'))
model.add(recurrent.LSTM(input_shape=(sample_len, 1,), units=3, activation='sigmoid', return_sequences=False))
model.add(Dense(units=1, activation='sigmoid'))
model.compile(optimizer='adam', loss='mse')
model.summary()
# Persist the architecture separately in Keras JSON format.
arch_filepath = '../model/simple_keras_model.arch'
with open(arch_filepath, 'w') as f:
    f.write(model.to_json())

# Checkpoint the best weights (by validation loss) and stop training early
# after 5 epochs without improvement.
weights_path = '../model/simple_keras_model.weights'
model_checkpoint = ModelCheckpoint(weights_path, monitor='val_loss',
                                   verbose=1,
                                   save_best_only=True,
                                   mode='auto')
early_stopping = EarlyStopping(monitor='val_loss',
                               patience=5,
                               verbose=1,
                               mode='auto')

model.fit(x=X_train, y=y_train,
          validation_data=(X_val, y_val),
          batch_size=BATCH_SIZE,
          epochs=100,
          callbacks=[model_checkpoint, early_stopping],
          verbose=2)
# Restore the best weights of the trained model (saved by ModelCheckpoint).
model.load_weights(weights_path)

# Save everything for later use in NNSharp: architecture + weights in one
# JSON file consumable by NNSharp.IO.ReaderKerasModel on the .NET side.
wrt = js.JSONwriter(model, '../model/simple_keras_model.json')
wrt.save()

# Check what the model outputs for some easily reproducible sequences.
# dig/11.0 matches the training-time normalization ix / (max_digit + 1),
# so the C# program can feed the same constants and compare predictions.
X_data = np.zeros((1, sample_len, 1,), dtype=np.float32)
for dig in [0, 1]:
    X_data.fill(dig/11.0)
    y_pred = model.predict(X_data)
    print('dig={} y_pred={}'.format(dig, y_pred[0]))
print('All done.')
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment