Created: July 30, 2023 19:44
-
-
Save jatinchowdhury18/2fb5c212283b6db89fce1170d4aad6cb to your computer and use it in GitHub Desktop.
RTNeural Conv1D Example
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np
import warnings
warnings.filterwarnings("ignore")
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras.layers import Input, Conv1D, PReLU, BatchNormalization
tf.compat.v1.enable_eager_execution()
import matplotlib.pyplot as plt
from model_utils import save_model

# Build a small stack of causal Conv1D -> PReLU -> BatchNorm stages,
# ending in a single-channel tanh conv, for the RTNeural Conv1D example.
input_shape = (128, 1)
x = Input(shape=input_shape, name="x")

# Three structurally identical stages with shrinking channel counts and
# kernel sizes: (12, 65) -> (8, 33) -> (4, 13).
net = x
for stage, (n_filters, kernel) in enumerate(zip((12, 8, 4), (65, 33, 13)), start=1):
    net = Conv1D(filters=n_filters, kernel_size=kernel, strides=1,
                 dilation_rate=1, activation=None, padding='causal',
                 name=f"conv{stage}")(net)
    net = PReLU(alpha_initializer='glorot_uniform', shared_axes=[1],
                name=f"PRelu{stage}")(net)
    # momentum=0 and random initializers make the batch-norm state
    # nontrivial, which is useful for testing the C++ re-implementation.
    net = BatchNormalization(momentum=0.0, epsilon=0.01,
                             beta_initializer='random_normal',
                             gamma_initializer='glorot_uniform',
                             moving_mean_initializer="random_normal",
                             moving_variance_initializer="ones",
                             name=f"bn{stage}")(net)

# Final conv collapses back to one channel with a tanh nonlinearity.
conv4 = Conv1D(filters=1, kernel_size=5, strides=1, dilation_rate=1,
               activation="tanh", padding='causal', name="conv4")(net)

model = keras.Model(inputs=x, outputs=conv4)
model.summary()

# Construct the test signal: a 10x-amplitude sine wave over 128 samples.
x_data = 10 * np.sin(np.arange(input_shape[0]) * np.pi * 0.1)
y = model.predict(x_data.reshape((1, -1, 1)))
print(y.shape)
y = y.flatten()

# Plot the input signal against the model output.
plt.figure()
plt.plot(x_data)
plt.plot(y, '--')
plt.ylim(-1.0, 1.0)
plt.savefig('python/conv.png')

# Save the signals and the model weights for the C++ side of the example.
np.savetxt('test_data/conv_x_python.csv', x_data, delimiter=',')
np.savetxt('test_data/conv_y_python.csv', y, delimiter=',')
save_model(model, 'models/conv.json')
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include "RTNeural/RTNeural.h"
#include "tests/load_csv.hpp"

#include <algorithm>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>
namespace fs = std::filesystem;

/**
 * @brief Resolves the path of the model JSON file from the executable path.
 *
 * Walks up the directory tree from @p path until the last path component
 * is the "RTNeural" repository root, then appends "models/conv.json".
 *
 * The original implementation looped unconditionally, which spins forever
 * when "RTNeural" is not an ancestor of @p path (parent_path() of the
 * filesystem root returns the root itself). Fail loudly instead.
 *
 * @param path  Any path inside the RTNeural source tree (e.g. the
 *              executable's canonical path).
 * @return Full path of "models/conv.json" as a string.
 * @throws std::runtime_error if no "RTNeural" ancestor directory exists.
 */
std::string getModelFile(fs::path path)
{
    // Ascend until the trailing component is the RTNeural root directory.
    while (path.filename().string() != "RTNeural")
    {
        const auto parent = path.parent_path();
        if (parent == path || parent.empty()) // hit the root (or ran out of components)
            throw std::runtime_error("Could not find RTNeural root directory in path!");
        path = parent;
    }

    // get path of model file, relative to the repository root
    path.append("models/conv.json");
    return path.string();
}
int main(int argc, char* argv[]) | |
{ | |
std::ifstream modelInputsFile{ "test_data/conv_x_python.csv" }; | |
std::vector<float> inputs = load_csv::loadFile<float>(modelInputsFile); | |
std::cout << "Data with size = " << inputs.size() <<" are loaded" << std::endl; | |
std::ifstream modelOutputsFile{ "test_data/conv_y_python.csv" }; | |
std::vector<float> referenceOutputs = load_csv::loadFile<float>(modelOutputsFile); | |
RTNeural::ModelT<float, 1, 1, | |
RTNeural::Conv1DT<float, 1, 12, 65, 1, false>, | |
RTNeural::PReLUActivationT<float, 12>, | |
RTNeural::BatchNorm1DT<float, 12, true>, | |
RTNeural::Conv1DT<float, 12, 8, 33, 1, false>, | |
RTNeural::PReLUActivationT<float, 8>, | |
RTNeural::BatchNorm1DT<float, 8, true>, | |
RTNeural::Conv1DT<float, 8, 4, 13, 1, false>, | |
RTNeural::PReLUActivationT<float, 4>, | |
RTNeural::BatchNorm1DT<float, 4, true>, | |
RTNeural::Conv1DT<float, 4, 1, 5, 1, false>, | |
RTNeural::TanhActivationT<float, 1>> modelt; | |
auto executablePath = fs::weakly_canonical(fs::path(argv[0])); | |
auto modelFilePath = getModelFile(executablePath); | |
std::cout << "Loading model from path: " << modelFilePath << std::endl; | |
std::ifstream jsonStream(modelFilePath, std::ifstream::binary); | |
modelt.parseJson(jsonStream, true); | |
modelt.reset(); | |
std::vector<float> testOutputs; | |
testOutputs.resize(referenceOutputs.size(), 0.0f); | |
for (size_t i = 0; i < inputs.size(); ++i) | |
{ | |
testOutputs[i] = modelt.forward(inputs.data() + i); | |
} | |
for (size_t i = 0; i < inputs.size(); ++i) | |
{ | |
std::cout << referenceOutputs[i] << " | " << testOutputs[i] << std::endl; | |
} | |
return 0; | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/Users/jatin/ChowDSP/Research/RTNeural/RTNeural/build-examples-eigen/examples/conv1d_example/Debug/conv1d_example | |
Data with size = 128 are loaded | |
Loading model from path: /Users/jatin/ChowDSP/Research/RTNeural/RTNeural/models/conv.json | |
# dimensions: 1 | |
Layer: conv1d | |
Dims: 12 | |
Layer: prelu | |
Dims: 12 | |
Layer: batchnorm | |
Dims: 12 | |
Layer: conv1d | |
Dims: 8 | |
Layer: prelu | |
Dims: 8 | |
Layer: batchnorm | |
Dims: 8 | |
Layer: conv1d | |
Dims: 4 | |
Layer: prelu | |
Dims: 4 | |
Layer: batchnorm | |
Dims: 4 | |
Layer: conv1d | |
Dims: 1 | |
activation: tanh | |
0.0227485 | 0.0227485 | |
0.0469352 | 0.0469352 | |
0.0793129 | 0.0793129 | |
0.108585 | 0.108585 | |
0.0864172 | 0.0864172 | |
0.0816107 | 0.0816108 | |
0.0844359 | 0.0844359 | |
0.0852961 | 0.0852961 | |
0.0859339 | 0.0859338 | |
0.088233 | 0.088233 | |
0.0864823 | 0.0864823 | |
0.0889542 | 0.0889542 | |
0.0874944 | 0.0874944 | |
0.084971 | 0.084971 | |
0.0836835 | 0.0836835 | |
0.0837997 | 0.0837997 | |
0.0825718 | 0.0825717 | |
0.0824659 | 0.0824659 | |
0.082222 | 0.082222 | |
0.0807985 | 0.0807985 | |
0.0798836 | 0.0798836 | |
0.0821496 | 0.0821496 | |
0.0758316 | 0.0758316 | |
0.0753655 | 0.0753656 | |
0.0557327 | 0.0557327 | |
0.0489168 | 0.0489168 | |
0.0440596 | 0.0440596 | |
0.0354367 | 0.0354367 | |
0.0408674 | 0.0408674 | |
0.0411478 | 0.0411478 | |
0.0226464 | 0.0226464 | |
0.0226619 | 0.0226619 | |
0.0246905 | 0.0246904 | |
0.028442 | 0.028442 | |
0.0355418 | 0.0355418 | |
0.0497209 | 0.0497209 | |
0.0642134 | 0.0642134 | |
0.065164 | 0.065164 | |
0.0547634 | 0.0547634 | |
0.0498201 | 0.0498201 | |
0.0443614 | 0.0443614 | |
0.0450946 | 0.0450946 | |
0.035899 | 0.035899 | |
0.0136339 | 0.0136339 | |
-0.00703312 | -0.00703312 | |
-0.0271763 | -0.0271763 | |
-0.0496456 | -0.0496455 | |
-0.0619817 | -0.0619817 | |
-0.0646335 | -0.0646335 | |
-0.0618597 | -0.0618597 | |
-0.0656286 | -0.0656286 | |
-0.0572471 | -0.0572471 | |
-0.0551374 | -0.0551374 | |
-0.0566866 | -0.0566865 | |
-0.0438252 | -0.0438252 | |
-0.0247437 | -0.0247437 | |
-0.0167545 | -0.0167544 | |
-0.00840331 | -0.0084033 | |
-0.00356988 | -0.00356986 | |
-0.0074762 | -0.00747616 | |
-0.00947504 | -0.00947505 | |
-0.0142049 | -0.0142048 | |
-0.00737881 | -0.0073788 | |
-0.00884268 | -0.00884265 | |
-0.0198353 | -0.0198353 | |
-0.0218407 | -0.0218407 | |
-0.0508656 | -0.0508656 | |
-0.0679643 | -0.0679643 | |
-0.0943454 | -0.0943454 | |
-0.115273 | -0.115273 | |
-0.114673 | -0.114673 | |
-0.104984 | -0.104984 | |
-0.123324 | -0.123324 | |
-0.111403 | -0.111403 | |
-0.067003 | -0.0670029 | |
-0.038416 | -0.0384161 | |
-0.0239184 | -0.0239184 | |
-0.0259809 | -0.0259809 | |
-0.0385573 | -0.0385573 | |
-0.0476143 | -0.0476144 | |
-0.0580758 | -0.0580757 | |
-0.0515314 | -0.0515313 | |
-0.0652689 | -0.0652689 | |
-0.0908117 | -0.0908117 | |
-0.0975168 | -0.0975168 | |
-0.105118 | -0.105118 | |
-0.130663 | -0.130663 | |
-0.152049 | -0.152049 | |
-0.167665 | -0.167665 | |
-0.159497 | -0.159497 | |
-0.136552 | -0.136552 | |
-0.115465 | -0.115465 | |
-0.0956429 | -0.095643 | |
-0.0714352 | -0.0714352 | |
-0.0286225 | -0.0286224 | |
-0.00121808 | -0.00121809 | |
0.00518463 | 0.00518466 | |
0.00864545 | 0.00864546 | |
0.00701354 | 0.00701358 | |
-0.00885276 | -0.00885276 | |
-0.00656808 | -0.00656806 | |
-0.0277943 | -0.0277942 | |
-0.0605838 | -0.0605837 | |
-0.0877284 | -0.0877284 | |
-0.11282 | -0.11282 | |
-0.137201 | -0.137201 | |
-0.158516 | -0.158516 | |
-0.172455 | -0.172455 | |
-0.179945 | -0.179945 | |
-0.167146 | -0.167146 | |
-0.137652 | -0.137652 | |
-0.11634 | -0.11634 | |
-0.0851963 | -0.0851962 | |
-0.0660128 | -0.0660128 | |
-0.0213864 | -0.0213864 | |
-0.00102227 | -0.00102228 | |
0.0017092 | 0.00170925 | |
0.00731937 | 0.00731936 | |
-0.00124542 | -0.0012454 | |
-0.0143371 | -0.014337 | |
-0.0119931 | -0.0119931 | |
-0.03146 | -0.03146 | |
-0.061953 | -0.061953 | |
-0.0881181 | -0.0881181 | |
-0.112118 | -0.112118 | |
-0.137004 | -0.137004 | |
-0.157973 | -0.157973 | |
-0.171742 | -0.171742 | |
Process finished with exit code 0 |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment