The body text goes here.
# Comment
print("hogehoge")
## Comment 2
print("fugafuga")
# https://mail.python.org/pipermail/scipy-user/2011-May/029521.html
import numpy as np
from scipy.spatial import cKDTree as KDTree

def KLdivergence(x, y):
    """Compute the Kullback-Leibler divergence between two multivariate samples.
    x : 2D array (n,d) of samples from P.  y : 2D array (m,d) of samples from Q."""
    n, d = x.shape
    m, _ = y.shape
    # Nearest-neighbour distances: within x (k=2 skips the point itself) and from each x to y.
    r = KDTree(x).query(x, k=2, eps=.01, p=2)[0][:, 1]
    s = KDTree(y).query(x, k=1, eps=.01, p=2)[0]
    # k-NN estimator of D(P||Q); the log(m/(n-1)) term corrects for the sample sizes.
    return -np.log(r / s).sum() * d / n + np.log(m / (n - 1.))
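As a quick usage sketch (the Gaussian test data below is illustrative, not from the original post): for two isotropic Gaussians the true divergence is ||mu_P - mu_Q||^2 / 2, so the estimate should land near 1.0 here.

# Hypothetical sanity check: P = N((0,0), I), Q = N((1,1), I), so D(P||Q) = ||(1,1)||^2 / 2 = 1.0.
rng = np.random.default_rng(0)
x = rng.normal(0.0, 1.0, size=(5000, 2))
y = rng.normal(1.0, 1.0, size=(5000, 2))
print(KLdivergence(x, y))  # should print a value close to 1.0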
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torchvision
import torchmetrics
import pytorch_lightning as pl

class TenLayersModel(pl.LightningModule):
    def __init__(self):
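The excerpt stops at __init__. As a hedged sketch of how such a module can be completed (the layer stack, metric setup, and optimizer below are assumptions standing in for the original ten-layer definition), a minimal PyTorch Lightning version looks like this:

import torch
import torch.nn as nn
import torch.nn.functional as F
import torchmetrics
import pytorch_lightning as pl

class TenLayersModel(pl.LightningModule):
    def __init__(self):
        super().__init__()
        # Assumed stand-in for the original ten-layer stack: a small CIFAR-style CNN.
        self.features = nn.Sequential(
            nn.Conv2d(3, 64, 3, padding=1), nn.BatchNorm2d(64), nn.ReLU(True),
            nn.Conv2d(64, 128, 3, padding=1), nn.BatchNorm2d(128), nn.ReLU(True),
            nn.AdaptiveAvgPool2d(1),
        )
        self.classifier = nn.Linear(128, 10)
        self.accuracy = torchmetrics.Accuracy(task="multiclass", num_classes=10)

    def forward(self, x):
        return self.classifier(self.features(x).flatten(1))

    def training_step(self, batch, batch_idx):
        x, y = batch
        logits = self(x)
        loss = F.cross_entropy(logits, y)
        self.log("train_loss", loss)
        self.log("train_acc", self.accuracy(logits, y))
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)

A pl.Trainer().fit(model, train_loader) call then drives training; the ResNet50 module below fits the same skeleton.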
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torchvision
import torchmetrics
import pytorch_lightning as pl
import time

class ResNet50(pl.LightningModule):
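Here the excerpt cuts off at the class line. A hedged sketch of one common way to fill it in, assuming the module wraps torchvision's ResNet-50 and uses the time import for per-epoch timing (both assumptions, not the original code):

import time
import torch
import torchvision
import pytorch_lightning as pl

class ResNet50(pl.LightningModule):
    def __init__(self, num_classes=10):
        super().__init__()
        # Assumed: reuse torchvision's ResNet-50 backbone and resize the final layer.
        self.model = torchvision.models.resnet50(weights=None)
        self.model.fc = torch.nn.Linear(self.model.fc.in_features, num_classes)

    def forward(self, x):
        return self.model(x)

    # The time import suggests per-epoch timing; Lightning's epoch hooks are one natural place for it.
    def on_train_epoch_start(self):
        self._epoch_start = time.time()

    def on_train_epoch_end(self):
        self.print(f"epoch time: {time.time() - self._epoch_start:.1f}s")

training_step and configure_optimizers would follow the same pattern as in the TenLayersModel sketch above.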
Koshian (smooth red bean paste)
Tsubuan (coarse red bean paste)
Shiroan (white bean paste)
Goma-an (sesame paste)
Uguisu-an (green pea paste)
Kuri-an (chestnut paste)
import tensorflow as tf
from tensorflow.keras import backend as K
import tensorflow.keras.layers as layers

# https://github.com/IShengFang/SpectralNormalizationKeras/blob/master/SpectralNormalizationKeras.py
class ConvSN2D(layers.Conv2D):
    def build(self, input_shape):
        # Pick the channel axis according to the image data format.
        if self.data_format == 'channels_first':
            channel_axis = 1
        else:
            channel_axis = -1
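For context on what the layer does beyond build(): spectral normalization divides the convolution kernel by an estimate of its largest singular value, obtained by power iteration. The helper below is a hedged sketch of that step using the Keras backend already imported as K; the function names (_l2normalize, spectral_normalize) and the single-iteration choice are assumptions, not the repository's exact code.

from tensorflow.keras import backend as K

def _l2normalize(v, eps=1e-12):
    # Normalise a vector to unit L2 norm (with a small epsilon for stability).
    return v / (K.sqrt(K.sum(K.square(v))) + eps)

def spectral_normalize(W, u):
    # Flatten the conv kernel into a matrix of shape (k*k*in_channels, out_channels).
    W_mat = K.reshape(W, [-1, K.int_shape(W)[-1]])
    # One power-iteration step: u and v approximate the leading singular vectors of W_mat.
    v = _l2normalize(K.dot(u, K.transpose(W_mat)))
    u_new = _l2normalize(K.dot(v, W_mat))
    # sigma approximates the largest singular value (spectral norm) of the kernel.
    sigma = K.sum(K.dot(K.dot(v, W_mat), K.transpose(u_new)))
    # The convolution is then performed with the rescaled kernel W / sigma.
    return W / sigma, u_new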
import torch
import torchvision
from models import TenLayersModel, ResNetLikeModel
from torchvision import transforms
from tqdm import tqdm
import numpy as np
import os
import pickle

def load_cifar():
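The excerpt ends at the load_cifar signature. A hedged sketch of what such a helper typically does with these imports follows; the torchvision-download route, batch size, and return format are assumptions (the os/pickle imports hint the original may instead read the raw CIFAR-10 batch files).

import torch
import torchvision
from torchvision import transforms

def load_cifar(batch_size=128):
    # Assumed behaviour: download CIFAR-10 via torchvision and wrap both splits in DataLoaders.
    transform = transforms.Compose([transforms.ToTensor()])
    train_set = torchvision.datasets.CIFAR10(root="./data", train=True, download=True, transform=transform)
    test_set = torchvision.datasets.CIFAR10(root="./data", train=False, download=True, transform=transform)
    train_loader = torch.utils.data.DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=2)
    test_loader = torch.utils.data.DataLoader(test_set, batch_size=batch_size, shuffle=False, num_workers=2)
    return train_loader, test_loader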
import torch
from torch import nn
import torch.nn.functional as F

class ResidualBlock(nn.Module):
    def __init__(self, ch):
        super().__init__()
        self.conv1 = self.conv_bn_relu(ch)
        self.conv2 = self.conv_bn_relu(ch)
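The block above is cut before conv_bn_relu and forward are defined. A minimal completion consistent with the two calls in __init__ might look like the sketch below; the 3x3 kernel size and the additive skip connection are assumptions.

from torch import nn

class ResidualBlock(nn.Module):
    def __init__(self, ch):
        super().__init__()
        self.conv1 = self.conv_bn_relu(ch)
        self.conv2 = self.conv_bn_relu(ch)

    def conv_bn_relu(self, ch):
        # Assumed: a 3x3 conv that keeps the channel count, followed by BatchNorm and ReLU.
        return nn.Sequential(
            nn.Conv2d(ch, ch, kernel_size=3, padding=1),
            nn.BatchNorm2d(ch),
            nn.ReLU(True),
        )

    def forward(self, x):
        # Assumed skip connection: add the block's output back onto its input.
        return x + self.conv2(self.conv1(x))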
import torch
from torch import nn
import torch.nn.functional as F

class Generator(nn.Module):
    def __init__(self, n_classes):
        super().__init__()
        self.linear = nn.Sequential(
            nn.Linear(100 + n_classes, 768),
            nn.ReLU(True)
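This conditional-generator excerpt stops inside the first nn.Sequential. A hedged sketch of a plausible continuation follows: everything after self.linear (the 48x4x4 reshape, the transposed-convolution stack, and the 32x32 RGB output) is an assumption chosen only so that the 768-unit projection has somewhere to go.

import torch
from torch import nn

class Generator(nn.Module):
    def __init__(self, n_classes):
        super().__init__()
        # From the excerpt: 100-d noise concatenated with a one-hot class vector, projected to 768 units.
        self.linear = nn.Sequential(
            nn.Linear(100 + n_classes, 768),
            nn.ReLU(True),
        )
        # Assumed continuation: reshape to (48, 4, 4) and upsample to a 32x32 RGB image.
        self.deconv = nn.Sequential(
            nn.ConvTranspose2d(48, 128, 4, stride=2, padding=1), nn.BatchNorm2d(128), nn.ReLU(True),
            nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1), nn.BatchNorm2d(64), nn.ReLU(True),
            nn.ConvTranspose2d(64, 3, 4, stride=2, padding=1), nn.Tanh(),
        )

    def forward(self, z, class_onehot):
        x = self.linear(torch.cat([z, class_onehot], dim=1))
        x = x.view(-1, 48, 4, 4)  # 768 = 48 * 4 * 4
        return self.deconv(x)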