import torch
import torch.nn as nn
import torch.nn.functional as F
from tqdm import tqdm


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(1, 32, 3, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1)
        self.dropout1 = nn.Dropout2d(0.25)
        self.dropout2 = nn.Dropout2d(0.5)
        # 9216 = 64 channels * 12 * 12 spatial positions after the conv/pool stack on a 28x28 input
        self.fc1 = nn.Linear(9216, 128)
        self.fc2 = nn.Linear(128, 10)

    # x represents our data
    def forward(self, x):
        # Pass data through conv1
        x = self.conv1(x)
        # Use the rectified-linear activation function over x
        x = F.relu(x)
        x = self.conv2(x)
        x = F.relu(x)
        # Run max pooling over x
        x = F.max_pool2d(x, 2)
        # Pass data through dropout1
        x = self.dropout1(x)
        # Flatten x with start_dim=1
        x = torch.flatten(x, 1)
        # Pass data through fc1
        x = self.fc1(x)
        x = F.relu(x)
        x = self.dropout2(x)
        x = self.fc2(x)
        # Apply log-softmax to x
        output = F.log_softmax(x, dim=1)
        return output
# Instantiate the model and run repeated forward passes on random 28x28 input.
# Fall back to CPU so the script still runs when CUDA is unavailable.
device = 'cuda' if torch.cuda.is_available() else 'cpu'
my_nn = Net().to(device)

with torch.no_grad():  # inference only, no gradients needed
    for _ in tqdm(range(100000)):
        random_data = torch.rand((1, 1, 28, 28), device=device)
        result = my_nn(random_data)
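
# Sanity-check sketch: the network maps a batch of N single-channel 28x28 images to
# N rows of 10 log-probabilities, so exponentiating a row should sum to ~1.0 and
# argmax gives the predicted digit class. The batch size of 4 below is arbitrary.
sample = torch.rand((4, 1, 28, 28), device=device)
with torch.no_grad():
    log_probs = my_nn(sample)
print(log_probs.shape)             # torch.Size([4, 10])
print(log_probs.exp().sum(dim=1))  # each entry is approximately 1.0
print(log_probs.argmax(dim=1))     # predicted class index per sample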