mixup in numpy, tensorflow(keras), and pytorch
import numpy as np
import tensorflow as tf
import torch


def mixup_np(features, labels, alpha=0.1):
    # numpy version
    # Draw one lambda per example from Beta(alpha, alpha) and mix each example
    # with the example at the mirrored position in the batch.
    num_examples = features.shape[0]
    mix = np.random.beta(alpha, alpha, size=[num_examples])
    # Move the batch axis to the end so the per-example lambdas broadcast,
    # then reverse the batch (now last) axis to pick the mixing partners.
    features = np.swapaxes(features, 0, -1)
    features = features * mix + features[..., ::-1] * (1.0 - mix)
    features = np.swapaxes(features, 0, -1)
    labels = np.swapaxes(labels, 0, -1)
    labels = labels * mix + labels[..., ::-1] * (1.0 - mix)
    labels = np.swapaxes(labels, 0, -1)
    return features, labels
def mixup_tf(features, labels, alpha=0.2):
    # tensorflow version (TF 1.x API; in TF 2.x use tfp.distributions.Beta
    # from tensorflow_probability instead of tf.distributions.Beta)
    # Draw one lambda per example and mix each example with the example at the
    # mirrored position in the batch. The [num_examples, 1, 1] sample shape
    # assumes 3-D feature tensors, e.g. [batch, time, freq].
    num_examples = features.shape[0]
    mix = tf.distributions.Beta(alpha, alpha).sample([num_examples, 1, 1])
    # mix = tf.maximum(mix, 1 - mix)  # optional: keep lambda >= 0.5
    features = features * mix + features[::-1] * (1 - mix)
    labels = labels * mix[:, 0] + labels[::-1] * (1 - mix[:, 0])
    return features, labels
def mixup_pt(features, labels, alpha=0.2, use_cuda=True):
    # pytorch version, adapted from:
    # https://github.com/facebookresearch/mixup-cifar10/blob/eaff31ab397a90fbc0a4aac71fb5311144b3608b/train.py#L119
    # A single lambda is drawn for the whole batch; each example is mixed with
    # a randomly permuted partner.
    if alpha > 0:
        lam = np.random.beta(alpha, alpha)
    else:
        lam = 1
    num_examples = features.size()[0]
    if use_cuda:
        index = torch.randperm(num_examples).cuda()
    else:
        index = torch.randperm(num_examples)
    # lam = max(lam, 1 - lam)  # optional: keep lambda >= 0.5
    features = lam * features + (1 - lam) * features[index, :]
    labels = lam * labels + (1 - lam) * labels[index, :]
    return features, labels
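
A minimal usage sketch, assuming 2-D float features and one-hot labels; the shapes and alpha values below are illustrative only. The TensorFlow helper targets the TF 1.x tf.distributions API, so only the NumPy and PyTorch versions are exercised here.

if __name__ == "__main__":
    # Dummy batch: 8 examples, 16 features, 3 one-hot classes.
    rng = np.random.RandomState(0)
    x_np = rng.rand(8, 16).astype(np.float32)
    y_np = np.eye(3, dtype=np.float32)[rng.randint(0, 3, size=8)]

    mixed_x, mixed_y = mixup_np(x_np, y_np, alpha=0.2)
    print("numpy:", mixed_x.shape, mixed_y.shape)

    mixed_x_pt, mixed_y_pt = mixup_pt(
        torch.from_numpy(x_np), torch.from_numpy(y_np), alpha=0.2, use_cuda=False)
    print("pytorch:", mixed_x_pt.shape, mixed_y_pt.shape)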