Skip to content

Instantly share code, notes, and snippets.

@daeken
Created February 9, 2019 09:21
Show Gist options
  • Save daeken/d12ae3045493a6c8c75dcce8cdad756b to your computer and use it in GitHub Desktop.
import tensorflow as tf
from tensorflow.keras import layers
from glob import glob
batchSize = 100  # samples per gradient step; also baked into toTimeDomain's slice shapes, so the model only accepts full batches of this size
def toTimeDomain(x):
    """Convert a batch of packed real/imag spectra to real time-domain signals.

    Args:
        x: float32 tensor of shape (batchSize, 512 * 9 * 2); the first
           512 * 9 columns are the real parts and the remaining 512 * 9
           columns the imaginary parts.  # assumed from the slice offsets — TODO confirm

    Returns:
        float32 tensor of shape (batchSize, 512 * 9): the real component of
        the inverse FFT of each row.
    """
    nBins = 512 * 9  # hoist the shared slice width
    rpart = tf.slice(x, [0, 0], [batchSize, nBins])
    ipart = tf.slice(x, [0, nBins], [batchSize, nBins])
    # Renamed from `complex` to avoid shadowing the Python builtin.
    spectrum = tf.complex(rpart, ipart)
    # tf.spectral.ifft was removed in TF 2.x; tf.signal.ifft is the
    # supported alias (present since late TF 1.x) with identical behavior.
    td = tf.signal.ifft(spectrum)
    return tf.math.real(td)
def loadOne(fn):
    """Read one raw training example from *fn*.

    File layout (inferred from the read sizes — TODO confirm):
      * first 512 * 9 * 4 bytes: 512 * 9 float32 values -> edata (the target)
      * next 512 * 2 * 2 bytes: decoded as float32 -> idata (the input)

    NOTE(review): 512 * 2 * 2 bytes decodes to only 512 float32 values; if
    idata is meant to hold 512 * 2 floats the read should be 512 * 2 * 4
    bytes. Left unchanged to preserve behavior — verify against the data
    files.

    Returns:
        (idata, edata) — two 1-D float32 tensors.
    """
    # `file()` was the Python 2 builtin alias for open() and no longer
    # exists on Python 3; open() behaves identically on both.
    with open(fn, 'rb') as fp:
        edata = tf.io.decode_raw(fp.read(512 * 9 * 4), tf.float32)
        idata = tf.io.decode_raw(fp.read(512 * 2 * 2), tf.float32)
    return idata, edata
# --- Script body: load the dataset, build the model, train. ---

tf.enable_eager_execution()  # TF 1.x API; removed in TF 2.x where eager is the default

# print as a function works on both Python 2 and 3; the original
# `print 'Loading'` statement form is a SyntaxError on Python 3.
print('Loading')
inputs = []
outputs = []
for fn in glob('training/*.bin'):
    i, e = loadOne(fn)
    inputs.append(i)
    outputs.append(e)
print('Done loading')

# Fully-connected stack; the final Lambda maps the 9216-wide spectral
# output (512*9 real + 512*9 imaginary) back into the time domain so the
# MAE loss is computed on waveforms.
model = tf.keras.Sequential()
model.add(layers.Dense(2048, activation='tanh'))
model.add(layers.Dense(16384, activation='tanh'))
model.add(layers.Dense(9216, activation='tanh'))
model.add(layers.Lambda(toTimeDomain))
model.compile(
    optimizer=tf.train.AdamOptimizer(0.01),  # TF 1.x optimizer; tf.keras.optimizers.Adam in TF 2.x
    loss='mae',
    metrics=['mae'],
)

# Batch size must match the hard-coded batchSize in toTimeDomain; repeat()
# makes the dataset infinite so steps_per_epoch bounds each epoch.
dataset = tf.data.Dataset.from_tensor_slices((inputs, outputs))
dataset = dataset.batch(batchSize)
dataset = dataset.repeat()
model.fit(dataset, epochs=100, steps_per_epoch=20)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment