Created
January 27, 2021 13:38
-
-
Save Ed-Optalysys/373bfcc1a17e67f680f233ff0af0410c to your computer and use it in GitHub Desktop.
Bayesian Dropout Layer
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Bayesian dropout layer: applies random or optionally pre-defined dropout
# to the input during both training and evaluation (Monte Carlo dropout).
class BayesianDropout(torch.nn.Module):
    """Dropout layer that stays active at evaluation time.

    Standard dropout is disabled in eval mode; for Bayesian (Monte Carlo)
    dropout it is kept active so that repeated forward passes over the same
    input yield samples from an approximate predictive distribution. A fixed
    mask may be supplied instead to make the dropout pattern reproducible.
    """

    def __init__(self, drop_rate: float):
        super().__init__()
        # Fail fast with a clear message; F.dropout would otherwise raise
        # later, at forward time, for an out-of-range probability.
        if not 0.0 <= drop_rate <= 1.0:
            raise ValueError(f"drop_rate must be in [0, 1], got {drop_rate}")
        self.p = drop_rate

    def forward(self, x, mask=None):
        """Apply always-on dropout, or a caller-supplied multiplicative mask.

        Args:
            x: input tensor.
            mask: optional tensor broadcastable against ``x``. When given,
                it is multiplied element-wise with ``x`` instead of sampling
                a fresh dropout mask. NOTE(review): a sampled mask scales
                surviving units by 1/(1-p), while a supplied mask is applied
                as-is — confirm callers account for that difference.

        Returns:
            Tensor of the same shape as ``x`` (mask permitting).
        """
        if mask is None:
            # training=True keeps dropout active even under model.eval().
            return F.dropout(x, self.p, training=True, inplace=False)
        return x * mask
# Example usage: a dropout layer that zeroes ~33% of activations on every
# forward pass, in both train and eval mode.
b_dropout = BayesianDropout(drop_rate=0.33)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment