GitHub Gists by A Deeper Understanding (deeperunderstanding)

deeperunderstanding / docker-compose.yaml (last active June 18, 2019 14:59)
Compose file with the database added, before adding volumes
version: '3'
services:
  notebook:
    build:
      context: ./jupyter-notebook-docker
    ports:
      - "8888:8888"
    depends_on:
      - mlflow
    environment:

# Keras snippets (presumably from the fx_aae_notebook gist listed below); assumed imports:
import numpy as np
from keras.layers import Input, Dense, TimeDistributed, Bidirectional, LSTM, BatchNormalization, ELU
from keras.models import Model
from keras.optimizers import Nadam

def create_encoder(latent_dim, cat_dim, window_size, input_dim):
    input_layer = Input(shape=(window_size, input_dim))
    code = TimeDistributed(Dense(64, activation='linear'))(input_layer)  # per-timestep projection
    code = Bidirectional(LSTM(128, return_sequences=True))(code)
    code = BatchNormalization()(code)
    code = ELU()(code)
    code = Bidirectional(LSTM(64))(code)
    code = BatchNormalization()(code)
    code = ELU()(code)
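    # The embed is cut off here. A hedged guess at the rest of create_encoder,
    # assuming the usual adversarial-autoencoder split into a continuous latent
    # head and a categorical (softmax) head; the layer choices below are assumptions.
    latent = Dense(latent_dim, activation='linear')(code)
    cat = Dense(cat_dim, activation='softmax')(code)
    return Model(input_layer, [latent, cat])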

def create_discriminator(latent_dim):
    input_layer = Input(shape=(latent_dim,))
    disc = Dense(128)(input_layer)
    disc = ELU()(disc)
    disc = Dense(64)(disc)
    disc = ELU()(disc)
    disc = Dense(1, activation="sigmoid")(disc)
    model = Model(input_layer, disc)
    return model

window_size = train_x.shape[1]   # length of each input window
input_dim = train_x.shape[2]     # features per time step
latent_dim = 32
cat_dim = 8

# Discriminator trained to tell prior samples from encoder outputs
prior_discriminator = create_discriminator(latent_dim)
prior_discriminator.compile(loss='binary_crossentropy',
                            optimizer=Nadam(0.0002, 0.5),
                            metrics=['accuracy'])

batches = 10000
batch_size = 64

# Loss histories (losses_disc_cat presumably tracks a second, categorical
# discriminator that is not shown in the embedded snippets)
losses_disc = []
losses_disc_cat = []
losses_ae = []
losses_val = []

# Discriminator targets: presumably 1 for samples from the prior, 0 for encoder outputs
real = np.ones((batch_size, 1))
fake = np.zeros((batch_size, 1))
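The embedded snippets stop before the training loop that these labels and loss lists feed into. Below is a minimal, hedged sketch of one discriminator update in the usual adversarial-autoencoder setup, with a standard-normal prior as the "real" distribution and the encoder's latent output as "fake". It relies on the assumed encoder completion above and is not taken from the original notebook.

encoder = create_encoder(latent_dim, cat_dim, window_size, input_dim)

for batch in range(batches):
    # Sample a mini-batch of training windows
    idx = np.random.randint(0, train_x.shape[0], batch_size)
    window_batch = train_x[idx]

    # "Real" codes come from the prior, "fake" codes from the encoder
    latent_real = np.random.normal(size=(batch_size, latent_dim))
    latent_fake, _ = encoder.predict(window_batch)

    d_loss_real = prior_discriminator.train_on_batch(latent_real, real)
    d_loss_fake = prior_discriminator.train_on_batch(latent_fake, fake)
    losses_disc.append(0.5 * np.add(d_loss_real, d_loss_fake))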
deeperunderstanding / fx_aae_notebook.ipynb (last active January 5, 2024 09:54)
Final_Public.ipynb
// Kotlin snippets: a Try type for functional error handling
import java.io.File

sealed class Try<T> {
    companion object {
        operator fun <T> invoke(func: () -> T): Try<T> =
            try {
                Success(func())
            } catch (error: Exception) {
                Failure(error)
            }
    }
}
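The embed stops before the Success and Failure cases and the map/flatMap combinators that main and toPet use below. A minimal sketch of those missing pieces, assuming the usual monadic Try; the extension-function form and the data-class choice are assumptions, not necessarily the original design:

data class Success<T>(val value: T) : Try<T>()
data class Failure<T>(val error: Exception) : Try<T>()

fun <T, R> Try<T>.map(transform: (T) -> R): Try<R> = flatMap { value -> Try { transform(value) } }

fun <T, R> Try<T>.flatMap(transform: (T) -> Try<R>): Try<R> = when (this) {
    is Success -> transform(value)
    is Failure -> Failure<R>(error)
}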

fun main() {
    val lines = Try {
        File("./my-pets.csv").readLines().map { it.split(',') }
    }

    // toPet (below) returns Try<Pet>, so each parsed row is itself wrapped in a Try
    val pets: Try<List<Try<Pet>>> = lines.map { rows -> rows.map(::toPet) }

    when (pets) {
        is Success -> println(pets.value)
        is Failure -> println("Could not read pets: ${pets.error}")
    }
}

fun toPet(values: List<String>): Try<Pet> {
    val name = values[0]
    val ageTry = Try { values[1].toInt() }
    val typeTry = PetType.lookup(values[2])
    return ageTry.flatMap { age -> typeTry.map { type -> Pet(name, age, type) } }
}
enum class PetType(val type: String) {
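    // The embed ends mid-declaration. A hedged completion so the snippet compiles:
    // the concrete enum values are assumptions; lookup returning Try<PetType> is
    // implied by toPet, which flatMaps over it.
    CAT("cat"), DOG("dog");

    companion object {
        fun lookup(value: String): Try<PetType> = Try {
            values().first { it.type == value }
        }
    }
}

// Pet is referenced by toPet and main but not shown in the embed; assumed shape:
data class Pet(val name: String, val age: Int, val type: PetType)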