@FlorianMerkle
Last active May 19, 2020 10:14
Minimal Example - L0 Brendel Bethge
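"""Minimal example: running the Foolbox L0 Brendel-Bethge attack against a
LeNet-300-100 MNIST classifier whose dense layers carry pruning masks."""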
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras import layers
import foolbox as fb
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
# Scale pixel values of both splits to [0, 1].
x_train = x_train.reshape(60000, 784).astype('float32') / 255
x_test = x_test.reshape(10000, 784).astype('float32') / 255
# Single test case for the attack: one image as a (1, 28, 28, 1) batch and its
# label as int64 so it compares cleanly with argmax outputs inside Foolbox.
x = tf.convert_to_tensor(x_train[422].reshape(1, 28, 28, 1), dtype=tf.float32)
y = tf.convert_to_tensor([y_train[422]], dtype=tf.int64)
class CustomLayer(layers.Layer):
    """Dense layer with a non-trainable pruning mask applied to its weights."""
    def __init__(self, units=32, activation='relu', input_shape=(784,)):
        super(CustomLayer, self).__init__()
        self.units = units
        self.activation = activation
        self.w = self.add_weight(shape=(input_shape[-1], self.units),
                                 initializer='random_normal',
                                 trainable=True,
                                 name='unpruned-weights')
        # Binary mask; entries set to 0 switch individual weights off.
        self.mask = self.add_weight(shape=self.w.shape,
                                    initializer='ones',
                                    trainable=False,
                                    name='pruning-masks')
        self.pruned_w = self.add_weight(shape=(input_shape[-1], self.units),
                                        initializer='ones',
                                        trainable=False,
                                        name='pruned-weights')

    def call(self, inputs):
        # Apply the mask on every forward pass so pruned weights stay at zero.
        self.pruned_w = tf.multiply(self.w, self.mask)
        x = tf.matmul(inputs, self.pruned_w)
        if self.activation == 'relu':
            return tf.keras.activations.relu(x)
        if self.activation == 'softmax':
            return tf.keras.activations.softmax(x)
        if self.activation is None:
            return x
        raise ValueError('Activation function not implemented')
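# The mask above is what makes the layer prunable. As a hedged illustration
# (not part of the original gist), prune_by_magnitude below is a hypothetical
# helper that zeroes the mask entries of the smallest-magnitude weights; after
# the assignment, call() multiplies those weights away on every forward pass,
# and their gradient through pruned_w vanishes as well.
import numpy as np

def prune_by_magnitude(layer, sparsity=0.5):
    """Zero the mask for the `sparsity` fraction of smallest-magnitude weights."""
    weights = layer.w.numpy()
    threshold = np.quantile(np.abs(weights), sparsity)  # magnitude cutoff
    layer.mask.assign((np.abs(weights) >= threshold).astype('float32'))

# Example usage once the model exists, e.g.:
# prune_by_magnitude(model.dense1, sparsity=0.8)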
class LeNet300_100(tf.keras.Model):
    """LeNet-300-100: three fully connected layers with 300, 100, and 10 units."""
    def __init__(self):
        super(LeNet300_100, self).__init__()
        self.dense1 = CustomLayer(300, input_shape=(None, 784))
        self.dense2 = CustomLayer(100, input_shape=(None, 300))
        self.dense3 = CustomLayer(10, activation=None, input_shape=(None, 100))

    def call(self, inputs):
        x = tf.keras.layers.Flatten()(inputs)
        x = self.dense1(x)
        x = self.dense2(x)
        x = self.dense3(x)
        self.pre_softmax = x  # keep the logits around for inspection
        return tf.keras.activations.softmax(x)
def initialize_model():
    model = LeNet300_100()
    # The model already applies softmax in call(), so the loss must treat its
    # outputs as probabilities (from_logits=False), not as logits.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3),
                  loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
                  metrics=['accuracy'],
                  experimental_run_tf_function=False  # only accepted on older TF 2.x versions
                  )
    return model
def train_model(model):
    # Stop once the validation loss has not improved for three epochs.
    callback = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=3)
    model.fit(x=x_train,
              y=y_train,
              batch_size=64,
              epochs=500,
              callbacks=[callback],
              validation_data=(x_test, y_test),
              )
model = initialize_model()
train_model(model)
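# Sanity check (an addition to the gist): the attack is only meaningful if the
# model classifies the chosen sample correctly to begin with.
print('prediction:', tf.argmax(model(x), axis=-1).numpy(), '- label:', y.numpy())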
#model = tf.keras.models.load_model('../saved-models/attack-test-model')
# Wrap the trained model for Foolbox; bounds match the [0, 1] pixel scaling.
fmodel = fb.models.TensorFlowModel(model, bounds=(0, 1))
attack = fb.attacks.L0BrendelBethgeAttack()
# With epsilons=None the attack minimizes the L0 distance and returns the
# (raw, clipped, success) triple that Foolbox 3 attacks produce.
raw, clipped, is_adv = attack(
    fmodel,
    x,
    y,
    epsilons=None,
)
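# Inspect the result (an addition to the gist): the number of changed pixels is
# exactly the L0 distance the Brendel-Bethge attack minimizes.
print('changed pixels (L0):', tf.math.count_nonzero(clipped - x).numpy())
print('is adversarial:', is_adv.numpy())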