import torch
import pyro
import pyro.distributions as dist

def model(data):
    # Standard-normal priors over every weight and bias of the deterministic network `dense`
    fc1w_prior = dist.Normal(loc=torch.zeros_like(dense.fc1.weight), scale=torch.ones_like(dense.fc1.weight))
    fc1b_prior = dist.Normal(loc=torch.zeros_like(dense.fc1.bias), scale=torch.ones_like(dense.fc1.bias))
    fc2w_prior = dist.Normal(loc=torch.zeros_like(dense.fc2.weight), scale=torch.ones_like(dense.fc2.weight))
    fc2b_prior = dist.Normal(loc=torch.zeros_like(dense.fc2.bias), scale=torch.ones_like(dense.fc2.bias))
    fc3w_prior = dist.Normal(loc=torch.zeros_like(dense.fc3.weight), scale=torch.ones_like(dense.fc3.weight))
    fc3b_prior = dist.Normal(loc=torch.zeros_like(dense.fc3.bias), scale=torch.ones_like(dense.fc3.bias))
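    # The rest of model() is not shown in the gist. A minimal sketch of the usual Pyro
    # pattern follows: lift the deterministic network `dense` with these priors via
    # pyro.random_module and score the observed classes with a Categorical likelihood.
    # The dict keys, the likelihood, and the "features"/"outcomes" keys are assumptions
    # inferred from the rest of the gist, not original code.
    priors = {"fc1.weight": fc1w_prior, "fc1.bias": fc1b_prior,
              "fc2.weight": fc2w_prior, "fc2.bias": fc2b_prior,
              "fc3.weight": fc3w_prior, "fc3.bias": fc3b_prior}
    lifted_module = pyro.random_module("module", dense, priors)
    sampled_model = lifted_module()                      # draw one concrete network
    logits = sampled_model(data["features"])
    pyro.sample("obs", dist.Categorical(logits=logits), obs=data["outcomes"])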
# Training loop for the deterministic PyTorch model
for epoch in range(800):
    running_loss = []
    for i, batch in enumerate(trainloader):
        inputs = batch["features"]
        labels = batch["outcomes"]
        optimizer.zero_grad()
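        # The gist truncates the loop body here. A typical remainder, assuming `dense` is the
        # DenseModel instance and `criterion` a suitable classification loss, both defined
        # elsewhere in the gist and not shown:
        outputs = dense(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        running_loss.append(loss.item())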
import torch.nn as nn
import torch.nn.functional as F

class DenseModel(nn.Module):
    def __init__(self, num_in):
        super(DenseModel, self).__init__()
        self.fc1 = nn.Linear(num_in, 16)
        self.fc2 = nn.Linear(16, 6)
        self.fc3 = nn.Linear(6, 3)

    def forward(self, x):
        x = F.relu(self.fc1(x))   # ReLU hidden layers, as in the Keras model below
        x = F.relu(self.fc2(x))
        return self.fc3(x)        # raw class scores; softmax is applied downstream
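# Not shown in the gist: instantiating the model. The input width of 2 is taken from the
# Keras Input(shape=(2,)) below; the variable name `dense` matches the Pyro model above.
dense = DenseModel(num_in=2)
print(dense(torch.rand(4, 2)).shape)   # torch.Size([4, 3])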
import tensorflow as tf
import tensorflow_probability as tfp

tf.keras.backend.clear_session()

dataset_size = len(train_)
# Scale each layer's KL term by the dataset size so it matches the per-example likelihood
kl_divergence_function = (lambda q, p, _: tfp.distributions.kl_divergence(q, p) / tf.cast(dataset_size, dtype=tf.float32))

# TFP version of the network: DenseFlipout layers learn a distribution over their weights
model_tfp = tf.keras.Sequential([
    tf.keras.Input(shape=(2,), name="basket"),
    tfp.layers.DenseFlipout(16, kernel_divergence_fn=kl_divergence_function, activation=tf.nn.relu, name="dense_tfp_1"),
    tfp.layers.DenseFlipout(6, kernel_divergence_fn=kl_divergence_function, activation=tf.nn.relu, name="dense_tfp_2"),
    tfp.layers.DenseFlipout(3, kernel_divergence_fn=kl_divergence_function, activation=tf.nn.softmax, name="out_tfp_pred"),
])
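# Not shown in the gist: compiling the Flipout model. The DenseFlipout layers add their
# KL terms to the model's losses automatically, so only the data-fit loss needs to be
# specified. Adam and the cross-entropy choice below are assumptions, not original code.
model_tfp.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1.0e-4),   # matches learning_rate set further down
                  loss="categorical_crossentropy",   # or sparse_categorical_crossentropy for integer labels
                  metrics=["accuracy"])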
# Fit with TensorBoard logging so training and validation curves can be inspected later
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir="./logs/egg_times", histogram_freq=1)

history = model.fit(train_,
                    target_,
                    epochs=800,
                    verbose=0,
                    use_multiprocessing=True,
                    callbacks=[tensorboard_callback],
                    validation_split=0.1,
                    validation_freq=20)
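# Not from the gist: a quick sanity check on the training curve without opening TensorBoard.
# (Validation metrics are only recorded every 20 epochs because of validation_freq=20,
# so they are left to TensorBoard here.)
import matplotlib.pyplot as plt

plt.plot(history.history["loss"], label="training loss")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.legend()
plt.show()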
# Plain (non-Bayesian) Keras version of the same architecture
tf.keras.backend.clear_session()

model = tf.keras.Sequential([
    tf.keras.Input(shape=(2,), name="basket"),
    tf.keras.layers.Dense(16, activation="relu", name="dense_1"),
    tf.keras.layers.Dense(6, activation="relu", name="dense_2"),
    tf.keras.layers.Dense(3, activation="softmax", name="out_pred"),
])

learning_rate = 1.0e-4
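# The compile call isn't shown in the gist but is needed before model.fit() above will run.
# Adam and categorical cross-entropy are assumptions; swap in whatever the original used.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
              loss="categorical_crossentropy",   # use sparse_categorical_crossentropy for integer labels
              metrics=["accuracy"])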
# Violin plots of time for each egg category
violin_parts = plt.violinplot(underdone['time'], positions=[0], showmeans=True)

# Override the default blue: violinplot has no 'color' parameter the way hist() does,
# so recolour the bodies and the min/max/mean bars by hand
for pc in violin_parts['bodies']:
    pc.set_facecolor('red')
violin_parts['cbars'].set_edgecolor('red')
violin_parts['cmaxes'].set_edgecolor('red')
violin_parts['cmins'].set_edgecolor('red')
violin_parts['cmeans'].set_edgecolor('red')

plt.violinplot(softboiled['time'], positions=[0.5], showmeans=True)
plt.violinplot(hardboiled['time'], positions=[1], showmeans=True)
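# Not in the gist: label the three positions so the plot reads on its own. The category
# names are taken from the dataframe variable names above.
plt.xticks([0, 0.5, 1], ["underdone", "softboiled", "hardboiled"])
plt.ylabel("time")
plt.show()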