Skip to content

Instantly share code, notes, and snippets.

@Whamp
Created April 26, 2019 01:19
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save Whamp/aa68d774450bffe8ef269133942014fe to your computer and use it in GitHub Desktop.
Save Whamp/aa68d774450bffe8ef269133942014fe to your computer and use it in GitHub Desktop.
Imagewoof Training with LearnedReLU
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"from exp.nb_10c import *"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# LearnedRelu"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"class LearnedRelu(nn.Module):\n",
"    \"ReLU variant with a learnable leak, shift (`sub`) and ceiling (`maxv`).\"\n",
"    def __init__(self, leak=0.05, sub=0.25, maxv=10):\n",
"        super().__init__()\n",
"        # NOTE(review): removed `self.mom = mom` -- `mom` was never a parameter\n",
"        # of __init__, so it either raised NameError or silently captured a\n",
"        # notebook global; the attribute was never read anywhere.\n",
"        self.leak = nn.Parameter(torch.ones(1)*leak)\n",
"        self.sub  = nn.Parameter(torch.zeros(1)+sub)\n",
"        self.maxv = nn.Parameter(torch.ones(1)*maxv)\n",
"    \n",
"    def forward(self, x):\n",
"        if self.training:\n",
"            with torch.no_grad():  # keep the learned params inside sane ranges\n",
"                self.leak.clamp_(0,.5)   # leak stays in [0, 0.5]\n",
"                self.sub.clamp_(0,1)     # shift stays in [0, 1]\n",
"                self.maxv.clamp_(5,100)  # ceiling stays in [5, 100] (old comment said 10/-10)\n",
"        # Use tensor ops instead of .item() so gradients flow to leak and maxv;\n",
"        # the original detached both, leaving only `sub` actually trainable.\n",
"        x = torch.where(x >= 0, x, x*self.leak)\n",
"        x = x - self.sub\n",
"        return torch.min(x, self.maxv)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## XResNet"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"def noop(x): return x\n",
"\n",
"class Flatten(nn.Module):\n",
" def forward(self, x): return x.view(x.size(0), -1)\n",
"\n",
"def conv(ni, nf, ks=3, stride=1, bias=False):\n",
" return nn.Conv2d(ni, nf, kernel_size=ks, stride=stride, padding=ks//2, bias=bias)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Use LearnedRelu instead of nn.ReLU with starting params of leak=0, sub=0.25 and maxv=10"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"#act_fn = nn.ReLU(inplace=True)\n",
"\n",
"def init_cnn(m):\n",
" if getattr(m, 'bias', None) is not None: nn.init.constant_(m.bias, 0)\n",
" if isinstance(m, (nn.Conv2d,nn.Linear)): nn.init.kaiming_normal_(m.weight)\n",
" for l in m.children(): init_cnn(l)\n",
"\n",
"def conv_layer(ni, nf, ks=3, stride=1, zero_bn=False, act=True):\n",
" bn = nn.BatchNorm2d(nf)\n",
" nn.init.constant_(bn.weight, 0. if zero_bn else 1.)\n",
" layers = [conv(ni, nf, ks, stride=stride), bn]\n",
" if act: layers.append(act_fn)\n",
" return nn.Sequential(*layers)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"class ResBlock(nn.Module):\n",
" def __init__(self, expansion, ni, nh, stride=1):\n",
" super().__init__()\n",
" nf,ni = nh*expansion,ni*expansion\n",
" layers = [conv_layer(ni, nh, 1)]\n",
" layers += [\n",
" conv_layer(nh, nf, 3, stride=stride, zero_bn=True, act=False)\n",
" ] if expansion==1 else [\n",
" conv_layer(nh, nh, 3, stride=stride),\n",
" conv_layer(nh, nf, 1, zero_bn=True, act=False)\n",
" ]\n",
" self.convs = nn.Sequential(*layers)\n",
" self.idconv = noop if ni==nf else conv_layer(ni, nf, 1, act=False)\n",
" self.pool = noop if stride==1 else nn.AvgPool2d(2)\n",
"\n",
" def forward(self, x): return act_fn(self.convs(x) + self.idconv(self.pool(x)))"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"class XResNet(nn.Sequential):\n",
" @classmethod\n",
" def create(cls, expansion, layers, c_in=3, c_out=1000):\n",
" nfs = [c_in, (c_in+1)*8, 64, 64]\n",
" stem = [conv_layer(nfs[i], nfs[i+1], stride=2 if i==0 else 1)\n",
" for i in range(3)]\n",
"\n",
" nfs = [64//expansion,64,128,256,512]\n",
" res_layers = [cls._make_layer(expansion, nfs[i], nfs[i+1],\n",
" n_blocks=l, stride=1 if i==0 else 2)\n",
" for i,l in enumerate(layers)]\n",
" res = cls(\n",
" *stem,\n",
" nn.MaxPool2d(kernel_size=3, stride=2, padding=1),\n",
" *res_layers,\n",
" nn.AdaptiveAvgPool2d(1), Flatten(),\n",
" nn.Linear(nfs[-1]*expansion, c_out),\n",
" )\n",
" init_cnn(res)\n",
" return res\n",
"\n",
" @staticmethod\n",
" def _make_layer(expansion, ni, nf, n_blocks, stride):\n",
" return nn.Sequential(\n",
" *[ResBlock(expansion, ni if i==0 else nf, nf, stride if i==0 else 1)\n",
" for i in range(n_blocks)])"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"def xresnet18_LR (**kwargs): return XResNet.create(1, [2, 2, 2, 2], **kwargs)\n",
"def xresnet34_LR (**kwargs): return XResNet.create(1, [3, 4, 6, 3], **kwargs)\n",
"def xresnet50_LR (**kwargs): return XResNet.create(4, [3, 4, 6, 3], **kwargs)\n",
"def xresnet101_LR(**kwargs): return XResNet.create(4, [3, 4, 23, 3], **kwargs)\n",
"def xresnet152_LR(**kwargs): return XResNet.create(4, [3, 8, 36, 3], **kwargs)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Train"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"cbfs = [partial(AvgStatsCallback,accuracy), ProgressCallback, CudaCallback,\n",
" partial(BatchTransformXCallback, norm_imagenette),\n",
" partial(MixUp, 0.2)\n",
" ]"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"loss_func = LabelSmoothingCrossEntropy()\n",
"opt_func = adam_opt(mom=0.9, mom_sqr=0.99, eps=1e-6, wd=1e-2)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"def get_batch(dl, learn):\n",
" learn.xb,learn.yb = next(iter(dl))\n",
" learn.do_begin_fit(0)\n",
" learn('begin_batch')\n",
" learn('after_fit')\n",
" return learn.xb,learn.yb"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We need to replace the old `model_summary` since it used to take a `Runner`."
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"# export\n",
"def model_summary(model, data, find_all=False, print_mod=False):\n",
"    # Run one validation batch through `model` and print each hooked module's\n",
"    # output shape (all linear-ish layers when find_all; module reprs when print_mod).\n",
"    # NOTE(review): still relies on the notebook-global `learn` to fetch the\n",
"    # batch through the callback pipeline (device placement, normalisation).\n",
"    xb,yb = get_batch(data.valid_dl, learn)\n",
"    mods = find_modules(model, is_lin_layer) if find_all else model.children()\n",
"    f = lambda hook,mod,inp,out: print(f\"====\\n{mod}\\n\" if print_mod else \"\", out.shape)\n",
"    # Fix: forward the `model` argument; the original ignored it and ran\n",
"    # `learn.model(xb)`, so hooks could sit on a different module than the one executed.\n",
"    with Hooks(mods, f) as hooks: model(xb)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"learn = Learner(arch(), data, loss_func, lr=1, cb_funcs=cbfs, opt_func=opt_func)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" torch.Size([64, 32, 64, 64])\n",
" torch.Size([64, 64, 64, 64])\n",
" torch.Size([64, 64, 64, 64])\n",
" torch.Size([64, 64, 32, 32])\n",
" torch.Size([64, 64, 32, 32])\n",
" torch.Size([64, 128, 16, 16])\n",
" torch.Size([64, 256, 8, 8])\n",
" torch.Size([64, 512, 4, 4])\n",
" torch.Size([64, 512, 1, 1])\n",
" torch.Size([64, 512])\n",
" torch.Size([64, 10])\n"
]
}
],
"source": [
"learn.model = learn.model.cuda()\n",
"model_summary(learn.model, data, print_mod=False)"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"def create_phases(phases):\n",
"    \"Given one or more phase fractions, append the remainder so they sum to 1.\"\n",
"    sizes = listify(phases)\n",
"    return sizes + [1 - sum(sizes)]"
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[0.3, 0.7]\n",
"[0.3, 0.2, 0.5]\n"
]
}
],
"source": [
"print(create_phases(0.3))\n",
"print(create_phases([0.3,0.2]))"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [],
"source": [
"pct_start = 0.5\n",
"phases = create_phases(pct_start)\n",
"sched_lr = combine_scheds(phases, cos_1cycle_anneal(lr/10., lr, lr/1e5))\n",
"sched_mom = combine_scheds(phases, cos_1cycle_anneal(0.95,0.85, 0.95))"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [],
"source": [
"cbsched = [\n",
" ParamScheduler('lr', sched_lr),\n",
" ParamScheduler('mom', sched_mom)]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## cnn_learner"
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"#export\n",
"def cnn_learner(arch, data, loss_func, opt_func, c_in=None, c_out=None,\n",
" lr=3e-3, cuda=True, norm=None, progress=True, mixup=0, xtra_cb=None, **kwargs):\n",
" cbfs = [partial(AvgStatsCallback,accuracy)]+listify(xtra_cb)\n",
" if progress: cbfs.append(ProgressCallback)\n",
" if cuda: cbfs.append(CudaCallback)\n",
" if norm: cbfs.append(partial(BatchTransformXCallback, norm))\n",
" if mixup: cbfs.append(partial(MixUp, mixup))\n",
" arch_args = {}\n",
" if not c_in : c_in = data.c_in\n",
" if not c_out: c_out = data.c_out\n",
" if c_in: arch_args['c_in' ]=c_in\n",
" if c_out: arch_args['c_out']=c_out\n",
" return Learner(arch(**arch_args), data, loss_func, opt_func=opt_func, lr=lr, cb_funcs=cbfs, **kwargs)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Imagewoof training"
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [],
"source": [
"path = datasets.untar_data(datasets.URLs.IMAGEWOOF_160)"
]
},
{
"cell_type": "code",
"execution_count": 38,
"metadata": {},
"outputs": [],
"source": [
"size = 128\n",
"# Train transforms: random resized crop + horizontal flip.\n",
"# Fix: use `size` consistently -- the crop previously hard-coded 128, so\n",
"# changing `size` would have silently desynchronised train and valid.\n",
"tfms = [make_rgb, RandomResizedCrop(size,scale=(0.35,1)), np_to_float, PilRandomFlip()]\n",
"bs = 64\n",
"\n",
"il = ImageList.from_files(path, tfms=tfms)\n",
"sd = SplitData.split_by_func(il, partial(grandparent_splitter, valid_name='val'))\n",
"ll = label_by_func(sd, parent_labeler, proc_y=CategoryProcessor())\n",
"\n",
"# Validation: deterministic centre crop, no augmentation.\n",
"ll.valid.x.tfms = [make_rgb, CenterCrop(size), np_to_float]\n",
"\n",
"data = ll.to_databunch(bs, c_in=3, c_out=10, num_workers=16)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Start with 5 epochs per arch"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Try xresnet18 with act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 39,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet18_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 40,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.176249</td>\n",
" <td>0.237594</td>\n",
" <td>2.221140</td>\n",
" <td>0.240000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.069809</td>\n",
" <td>0.309860</td>\n",
" <td>2.099944</td>\n",
" <td>0.280000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.983187</td>\n",
" <td>0.356351</td>\n",
" <td>1.887672</td>\n",
" <td>0.388000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.838930</td>\n",
" <td>0.452465</td>\n",
" <td>1.719779</td>\n",
" <td>0.436000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.699843</td>\n",
" <td>0.537899</td>\n",
" <td>1.478415</td>\n",
" <td>0.584000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(5, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"xresnet34 act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 41,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet34_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 42,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.196493</td>\n",
" <td>0.224346</td>\n",
" <td>2.278707</td>\n",
" <td>0.186000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.056007</td>\n",
" <td>0.312751</td>\n",
" <td>2.125399</td>\n",
" <td>0.246000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.985658</td>\n",
" <td>0.356351</td>\n",
" <td>2.044914</td>\n",
" <td>0.338000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.843285</td>\n",
" <td>0.439297</td>\n",
" <td>1.567619</td>\n",
" <td>0.522000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.686897</td>\n",
" <td>0.541995</td>\n",
" <td>1.457239</td>\n",
" <td>0.582000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(5, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Try xresnet50 act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 43,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet50_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 44,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.234159</td>\n",
" <td>0.223703</td>\n",
" <td>2.236628</td>\n",
" <td>0.238000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.131158</td>\n",
" <td>0.269552</td>\n",
" <td>2.507610</td>\n",
" <td>0.198000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>2.049494</td>\n",
" <td>0.314196</td>\n",
" <td>1.884213</td>\n",
" <td>0.360000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.905126</td>\n",
" <td>0.397061</td>\n",
" <td>1.616795</td>\n",
" <td>0.478000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.746164</td>\n",
" <td>0.504497</td>\n",
" <td>1.477996</td>\n",
" <td>0.570000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(5, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Now try 20 epochs"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Try xresnet18 for 20 epochs act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 45,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet18_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 46,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.167902</td>\n",
" <td>0.245222</td>\n",
" <td>2.191928</td>\n",
" <td>0.208000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.026972</td>\n",
" <td>0.332664</td>\n",
" <td>1.896216</td>\n",
" <td>0.382000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.939512</td>\n",
" <td>0.394974</td>\n",
" <td>1.802670</td>\n",
" <td>0.430000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.873435</td>\n",
" <td>0.433114</td>\n",
" <td>1.819426</td>\n",
" <td>0.376000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.828495</td>\n",
" <td>0.456078</td>\n",
" <td>1.793037</td>\n",
" <td>0.424000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>5</td>\n",
" <td>1.792697</td>\n",
" <td>0.486992</td>\n",
" <td>1.786889</td>\n",
" <td>0.458000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>6</td>\n",
" <td>1.756102</td>\n",
" <td>0.505621</td>\n",
" <td>1.610511</td>\n",
" <td>0.502000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>7</td>\n",
" <td>1.716103</td>\n",
" <td>0.526578</td>\n",
" <td>1.621264</td>\n",
" <td>0.500000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>8</td>\n",
" <td>1.671429</td>\n",
" <td>0.551550</td>\n",
" <td>1.650194</td>\n",
" <td>0.508000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>9</td>\n",
" <td>1.617615</td>\n",
" <td>0.586398</td>\n",
" <td>1.516406</td>\n",
" <td>0.556000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>10</td>\n",
" <td>1.597746</td>\n",
" <td>0.598201</td>\n",
" <td>1.454922</td>\n",
" <td>0.596000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>11</td>\n",
" <td>1.565472</td>\n",
" <td>0.615224</td>\n",
" <td>1.329912</td>\n",
" <td>0.668000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>12</td>\n",
" <td>1.518125</td>\n",
" <td>0.647503</td>\n",
" <td>1.276211</td>\n",
" <td>0.690000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>13</td>\n",
" <td>1.461696</td>\n",
" <td>0.679942</td>\n",
" <td>1.254484</td>\n",
" <td>0.694000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>14</td>\n",
" <td>1.415326</td>\n",
" <td>0.695279</td>\n",
" <td>1.238152</td>\n",
" <td>0.688000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>15</td>\n",
" <td>1.379721</td>\n",
" <td>0.724185</td>\n",
" <td>1.111844</td>\n",
" <td>0.734000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>16</td>\n",
" <td>1.339659</td>\n",
" <td>0.744741</td>\n",
" <td>1.082541</td>\n",
" <td>0.762000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>17</td>\n",
" <td>1.297141</td>\n",
" <td>0.763610</td>\n",
" <td>1.037464</td>\n",
" <td>0.772000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>18</td>\n",
" <td>1.277375</td>\n",
" <td>0.779910</td>\n",
" <td>1.011208</td>\n",
" <td>0.804000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>19</td>\n",
" <td>1.258403</td>\n",
" <td>0.784085</td>\n",
" <td>1.006406</td>\n",
" <td>0.802000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(20, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Try xresnet34 for 20 epochs act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 48,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet34_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 49,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.187672</td>\n",
" <td>0.233580</td>\n",
" <td>2.063233</td>\n",
" <td>0.254000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.021192</td>\n",
" <td>0.336840</td>\n",
" <td>1.867386</td>\n",
" <td>0.386000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.905678</td>\n",
" <td>0.406697</td>\n",
" <td>1.746315</td>\n",
" <td>0.414000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.839112</td>\n",
" <td>0.447406</td>\n",
" <td>1.697816</td>\n",
" <td>0.454000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.792300</td>\n",
" <td>0.482736</td>\n",
" <td>1.871543</td>\n",
" <td>0.412000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>5</td>\n",
" <td>1.741217</td>\n",
" <td>0.513409</td>\n",
" <td>1.678299</td>\n",
" <td>0.468000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>6</td>\n",
" <td>1.702204</td>\n",
" <td>0.531717</td>\n",
" <td>1.634758</td>\n",
" <td>0.492000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>7</td>\n",
" <td>1.666179</td>\n",
" <td>0.559579</td>\n",
" <td>1.740362</td>\n",
" <td>0.444000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>8</td>\n",
" <td>1.632642</td>\n",
" <td>0.577084</td>\n",
" <td>1.588213</td>\n",
" <td>0.504000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>9</td>\n",
" <td>1.594113</td>\n",
" <td>0.601012</td>\n",
" <td>1.429820</td>\n",
" <td>0.600000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>10</td>\n",
" <td>1.544769</td>\n",
" <td>0.627670</td>\n",
" <td>1.406165</td>\n",
" <td>0.614000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>11</td>\n",
" <td>1.511973</td>\n",
" <td>0.642524</td>\n",
" <td>1.270103</td>\n",
" <td>0.674000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>12</td>\n",
" <td>1.477450</td>\n",
" <td>0.669584</td>\n",
" <td>1.284006</td>\n",
" <td>0.666000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>13</td>\n",
" <td>1.427329</td>\n",
" <td>0.691103</td>\n",
" <td>1.188570</td>\n",
" <td>0.704000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>14</td>\n",
" <td>1.379151</td>\n",
" <td>0.718886</td>\n",
" <td>1.178042</td>\n",
" <td>0.716000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>15</td>\n",
" <td>1.335218</td>\n",
" <td>0.739040</td>\n",
" <td>1.086630</td>\n",
" <td>0.762000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>16</td>\n",
" <td>1.292372</td>\n",
" <td>0.759595</td>\n",
" <td>1.043058</td>\n",
" <td>0.772000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>17</td>\n",
" <td>1.251726</td>\n",
" <td>0.779750</td>\n",
" <td>1.019319</td>\n",
" <td>0.794000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>18</td>\n",
" <td>1.243845</td>\n",
" <td>0.794283</td>\n",
" <td>0.990268</td>\n",
" <td>0.806000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>19</td>\n",
" <td>1.216266</td>\n",
" <td>0.805203</td>\n",
" <td>0.985682</td>\n",
" <td>0.812000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(20, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Try xresnet50 for 20 epochs act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 50,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet50_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 51,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.224664</td>\n",
" <td>0.230689</td>\n",
" <td>2.100740</td>\n",
" <td>0.274000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.052242</td>\n",
" <td>0.316284</td>\n",
" <td>1.982163</td>\n",
" <td>0.336000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.992518</td>\n",
" <td>0.351694</td>\n",
" <td>1.986614</td>\n",
" <td>0.346000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.927298</td>\n",
" <td>0.395776</td>\n",
" <td>1.893158</td>\n",
" <td>0.394000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.880563</td>\n",
" <td>0.428457</td>\n",
" <td>1.651634</td>\n",
" <td>0.446000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>5</td>\n",
" <td>1.817068</td>\n",
" <td>0.466276</td>\n",
" <td>1.728359</td>\n",
" <td>0.484000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>6</td>\n",
" <td>1.781768</td>\n",
" <td>0.485788</td>\n",
" <td>1.883906</td>\n",
" <td>0.420000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>7</td>\n",
" <td>1.730777</td>\n",
" <td>0.514855</td>\n",
" <td>1.653171</td>\n",
" <td>0.484000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>8</td>\n",
" <td>1.681395</td>\n",
" <td>0.547133</td>\n",
" <td>1.581772</td>\n",
" <td>0.506000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>9</td>\n",
" <td>1.636926</td>\n",
" <td>0.570178</td>\n",
" <td>1.735935</td>\n",
" <td>0.456000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>10</td>\n",
" <td>1.590511</td>\n",
" <td>0.597158</td>\n",
" <td>1.431740</td>\n",
" <td>0.580000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>11</td>\n",
" <td>1.527189</td>\n",
" <td>0.632086</td>\n",
" <td>1.354265</td>\n",
" <td>0.654000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>12</td>\n",
" <td>1.503062</td>\n",
" <td>0.648225</td>\n",
" <td>1.277595</td>\n",
" <td>0.670000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>13</td>\n",
" <td>1.447690</td>\n",
" <td>0.671351</td>\n",
" <td>1.241779</td>\n",
" <td>0.702000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>14</td>\n",
" <td>1.403427</td>\n",
" <td>0.707644</td>\n",
" <td>1.173734</td>\n",
" <td>0.698000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>15</td>\n",
" <td>1.351855</td>\n",
" <td>0.730127</td>\n",
" <td>1.108114</td>\n",
" <td>0.736000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>16</td>\n",
" <td>1.302127</td>\n",
" <td>0.750763</td>\n",
" <td>1.070876</td>\n",
" <td>0.770000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>17</td>\n",
" <td>1.277606</td>\n",
" <td>0.776618</td>\n",
" <td>1.014119</td>\n",
" <td>0.790000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>18</td>\n",
" <td>1.252541</td>\n",
" <td>0.782480</td>\n",
" <td>1.017980</td>\n",
" <td>0.784000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" <tr>\n",
" <td>19</td>\n",
" <td>1.244521</td>\n",
" <td>0.792757</td>\n",
" <td>1.009158</td>\n",
" <td>0.786000</td>\n",
" <td>00:36</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(20, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Now try 80 Epochs"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Try xresnet18 for 80 epochs act_fn = LearnedRelu(leak=0,sub=0.25,maxv=10)"
]
},
{
"cell_type": "code",
"execution_count": 55,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet18_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.183085</td>\n",
" <td>0.233499</td>\n",
" <td>2.064838</td>\n",
" <td>0.268000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>2.036856</td>\n",
" <td>0.328649</td>\n",
" <td>1.955298</td>\n",
" <td>0.318000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.926130</td>\n",
" <td>0.398185</td>\n",
" <td>1.822344</td>\n",
" <td>0.402000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.848007</td>\n",
" <td>0.449896</td>\n",
" <td>1.712516</td>\n",
" <td>0.436000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.785628</td>\n",
" <td>0.482496</td>\n",
" <td>1.673672</td>\n",
" <td>0.458000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>5</td>\n",
" <td>1.731562</td>\n",
" <td>0.517344</td>\n",
" <td>1.538372</td>\n",
" <td>0.536000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>6</td>\n",
" <td>1.693706</td>\n",
" <td>0.543761</td>\n",
" <td>1.539260</td>\n",
" <td>0.534000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>7</td>\n",
" <td>1.652108</td>\n",
" <td>0.565441</td>\n",
" <td>1.454449</td>\n",
" <td>0.576000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>8</td>\n",
" <td>1.633477</td>\n",
" <td>0.581259</td>\n",
" <td>1.379177</td>\n",
" <td>0.612000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>9</td>\n",
" <td>1.606494</td>\n",
" <td>0.597720</td>\n",
" <td>1.387430</td>\n",
" <td>0.616000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>10</td>\n",
" <td>1.587440</td>\n",
" <td>0.598041</td>\n",
" <td>1.500325</td>\n",
" <td>0.540000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>11</td>\n",
" <td>1.558534</td>\n",
" <td>0.620443</td>\n",
" <td>1.330755</td>\n",
" <td>0.656000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>12</td>\n",
" <td>1.551259</td>\n",
" <td>0.623976</td>\n",
" <td>1.359921</td>\n",
" <td>0.616000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>13</td>\n",
" <td>1.539385</td>\n",
" <td>0.637305</td>\n",
" <td>1.311043</td>\n",
" <td>0.658000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>14</td>\n",
" <td>1.521479</td>\n",
" <td>0.649269</td>\n",
" <td>1.324049</td>\n",
" <td>0.634000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>15</td>\n",
" <td>1.510519</td>\n",
" <td>0.649269</td>\n",
" <td>1.327740</td>\n",
" <td>0.656000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>16</td>\n",
" <td>1.507671</td>\n",
" <td>0.649350</td>\n",
" <td>1.205164</td>\n",
" <td>0.676000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>17</td>\n",
" <td>1.479404</td>\n",
" <td>0.663642</td>\n",
" <td>1.226380</td>\n",
" <td>0.682000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>18</td>\n",
" <td>1.468316</td>\n",
" <td>0.676329</td>\n",
" <td>1.209722</td>\n",
" <td>0.694000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>19</td>\n",
" <td>1.464214</td>\n",
" <td>0.676811</td>\n",
" <td>1.294238</td>\n",
" <td>0.656000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>20</td>\n",
" <td>1.441304</td>\n",
" <td>0.685643</td>\n",
" <td>1.260472</td>\n",
" <td>0.688000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>21</td>\n",
" <td>1.436423</td>\n",
" <td>0.688935</td>\n",
" <td>1.345545</td>\n",
" <td>0.618000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>22</td>\n",
" <td>1.426463</td>\n",
" <td>0.694957</td>\n",
" <td>1.327027</td>\n",
" <td>0.660000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>23</td>\n",
" <td>1.432787</td>\n",
" <td>0.693994</td>\n",
" <td>1.338258</td>\n",
" <td>0.644000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>24</td>\n",
" <td>1.417736</td>\n",
" <td>0.699053</td>\n",
" <td>1.244542</td>\n",
" <td>0.670000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>25</td>\n",
" <td>1.403833</td>\n",
" <td>0.710936</td>\n",
" <td>1.233393</td>\n",
" <td>0.690000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>26</td>\n",
" <td>1.399267</td>\n",
" <td>0.709089</td>\n",
" <td>1.155323</td>\n",
" <td>0.694000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>27</td>\n",
" <td>1.397160</td>\n",
" <td>0.713185</td>\n",
" <td>1.165999</td>\n",
" <td>0.694000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>28</td>\n",
" <td>1.379686</td>\n",
" <td>0.720411</td>\n",
" <td>1.385822</td>\n",
" <td>0.606000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>29</td>\n",
" <td>1.373003</td>\n",
" <td>0.721294</td>\n",
" <td>1.170660</td>\n",
" <td>0.726000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>30</td>\n",
" <td>1.379021</td>\n",
" <td>0.722499</td>\n",
" <td>1.200504</td>\n",
" <td>0.694000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>31</td>\n",
" <td>1.363668</td>\n",
" <td>0.728200</td>\n",
" <td>1.202543</td>\n",
" <td>0.744000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>32</td>\n",
" <td>1.352575</td>\n",
" <td>0.736390</td>\n",
" <td>1.197765</td>\n",
" <td>0.718000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>33</td>\n",
" <td>1.358477</td>\n",
" <td>0.731572</td>\n",
" <td>1.243271</td>\n",
" <td>0.700000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>34</td>\n",
" <td>1.359124</td>\n",
" <td>0.737835</td>\n",
" <td>1.177767</td>\n",
" <td>0.708000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>35</td>\n",
" <td>1.335650</td>\n",
" <td>0.743777</td>\n",
" <td>1.132762</td>\n",
" <td>0.730000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>36</td>\n",
" <td>1.326976</td>\n",
" <td>0.749237</td>\n",
" <td>1.165978</td>\n",
" <td>0.736000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>37</td>\n",
" <td>1.328284</td>\n",
" <td>0.749318</td>\n",
" <td>1.067941</td>\n",
" <td>0.748000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>38</td>\n",
" <td>1.311462</td>\n",
" <td>0.756865</td>\n",
" <td>1.162699</td>\n",
" <td>0.716000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>39</td>\n",
" <td>1.311170</td>\n",
" <td>0.758391</td>\n",
" <td>1.112201</td>\n",
" <td>0.748000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>40</td>\n",
" <td>1.308434</td>\n",
" <td>0.760318</td>\n",
" <td>1.268662</td>\n",
" <td>0.672000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>41</td>\n",
" <td>1.292185</td>\n",
" <td>0.766581</td>\n",
" <td>1.069406</td>\n",
" <td>0.756000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>42</td>\n",
" <td>1.288650</td>\n",
" <td>0.765939</td>\n",
" <td>1.178188</td>\n",
" <td>0.734000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>43</td>\n",
" <td>1.290407</td>\n",
" <td>0.768508</td>\n",
" <td>1.063556</td>\n",
" <td>0.762000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>44</td>\n",
" <td>1.269911</td>\n",
" <td>0.778063</td>\n",
" <td>1.091261</td>\n",
" <td>0.776000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>45</td>\n",
" <td>1.270059</td>\n",
" <td>0.781115</td>\n",
" <td>1.140386</td>\n",
" <td>0.750000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>46</td>\n",
" <td>1.249129</td>\n",
" <td>0.793480</td>\n",
" <td>1.091112</td>\n",
" <td>0.748000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>47</td>\n",
" <td>1.246204</td>\n",
" <td>0.791633</td>\n",
" <td>0.997746</td>\n",
" <td>0.794000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>48</td>\n",
" <td>1.228930</td>\n",
" <td>0.799422</td>\n",
" <td>1.052374</td>\n",
" <td>0.772000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>49</td>\n",
" <td>1.223983</td>\n",
" <td>0.803758</td>\n",
" <td>1.064539</td>\n",
" <td>0.766000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>50</td>\n",
" <td>1.214212</td>\n",
" <td>0.811065</td>\n",
" <td>1.034258</td>\n",
" <td>0.792000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>51</td>\n",
" <td>1.207002</td>\n",
" <td>0.811145</td>\n",
" <td>1.027285</td>\n",
" <td>0.782000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>52</td>\n",
" <td>1.215687</td>\n",
" <td>0.813955</td>\n",
" <td>1.003717</td>\n",
" <td>0.790000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>53</td>\n",
" <td>1.183415</td>\n",
" <td>0.826000</td>\n",
" <td>1.039872</td>\n",
" <td>0.780000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>54</td>\n",
" <td>1.170074</td>\n",
" <td>0.832343</td>\n",
" <td>0.981274</td>\n",
" <td>0.804000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>55</td>\n",
" <td>1.163487</td>\n",
" <td>0.832584</td>\n",
" <td>1.049273</td>\n",
" <td>0.776000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>56</td>\n",
" <td>1.164070</td>\n",
" <td>0.838285</td>\n",
" <td>1.068435</td>\n",
" <td>0.772000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>57</td>\n",
" <td>1.148437</td>\n",
" <td>0.842942</td>\n",
" <td>0.980234</td>\n",
" <td>0.816000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>58</td>\n",
" <td>1.139802</td>\n",
" <td>0.850972</td>\n",
" <td>0.977949</td>\n",
" <td>0.826000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>59</td>\n",
" <td>1.131876</td>\n",
" <td>0.852417</td>\n",
" <td>0.952905</td>\n",
" <td>0.804000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>60</td>\n",
" <td>1.125654</td>\n",
" <td>0.858680</td>\n",
" <td>1.010411</td>\n",
" <td>0.802000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>61</td>\n",
" <td>1.108886</td>\n",
" <td>0.865987</td>\n",
" <td>0.972184</td>\n",
" <td>0.796000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>62</td>\n",
" <td>1.099800</td>\n",
" <td>0.872089</td>\n",
" <td>1.014797</td>\n",
" <td>0.810000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>63</td>\n",
" <td>1.102482</td>\n",
" <td>0.874498</td>\n",
" <td>0.973440</td>\n",
" <td>0.816000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>64</td>\n",
" <td>1.075797</td>\n",
" <td>0.886141</td>\n",
" <td>0.997300</td>\n",
" <td>0.808000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>65</td>\n",
" <td>1.072178</td>\n",
" <td>0.889353</td>\n",
" <td>0.929644</td>\n",
" <td>0.818000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>66</td>\n",
" <td>1.052844</td>\n",
" <td>0.892484</td>\n",
" <td>0.943376</td>\n",
" <td>0.826000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>67</td>\n",
" <td>1.049692</td>\n",
" <td>0.899390</td>\n",
" <td>0.933815</td>\n",
" <td>0.824000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>68</td>\n",
" <td>1.050222</td>\n",
" <td>0.897944</td>\n",
" <td>0.915714</td>\n",
" <td>0.828000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>69</td>\n",
" <td>1.029998</td>\n",
" <td>0.906295</td>\n",
" <td>0.894921</td>\n",
" <td>0.844000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>70</td>\n",
" <td>1.030690</td>\n",
" <td>0.909587</td>\n",
" <td>0.914151</td>\n",
" <td>0.840000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>71</td>\n",
" <td>1.020377</td>\n",
" <td>0.914485</td>\n",
" <td>0.923865</td>\n",
" <td>0.830000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>72</td>\n",
" <td>1.016820</td>\n",
" <td>0.913281</td>\n",
" <td>0.919113</td>\n",
" <td>0.830000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>73</td>\n",
" <td>1.013819</td>\n",
" <td>0.915850</td>\n",
" <td>0.906465</td>\n",
" <td>0.840000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>74</td>\n",
" <td>1.009261</td>\n",
" <td>0.918741</td>\n",
" <td>0.913897</td>\n",
" <td>0.832000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>75</td>\n",
" <td>1.010359</td>\n",
" <td>0.914485</td>\n",
" <td>0.916061</td>\n",
" <td>0.832000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>76</td>\n",
" <td>1.008466</td>\n",
" <td>0.922916</td>\n",
" <td>0.906287</td>\n",
" <td>0.842000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>77</td>\n",
" <td>1.000206</td>\n",
" <td>0.921792</td>\n",
" <td>0.907698</td>\n",
" <td>0.830000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>78</td>\n",
" <td>1.004182</td>\n",
" <td>0.920186</td>\n",
" <td>0.897255</td>\n",
" <td>0.838000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" <tr>\n",
" <td>79</td>\n",
" <td>1.010172</td>\n",
" <td>0.919142</td>\n",
" <td>0.906381</td>\n",
" <td>0.832000</td>\n",
" <td>00:15</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(80, cbsched)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "Now train `xresnet34` for 80 epochs with `act_fn = LearnedRelu(leak=0, sub=0.25, maxv=10)`."
]
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [],
"source": [
"learn = cnn_learner(xresnet34_LR, data, loss_func, opt_func, lr=3e-3, norm=norm_imagenette, mixup=0.2)"
]
},
{
"cell_type": "code",
"execution_count": 58,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: left;\">\n",
" <th>epoch</th>\n",
" <th>train_loss</th>\n",
" <th>train_accuracy</th>\n",
" <th>valid_loss</th>\n",
" <th>valid_accuracy</th>\n",
" <th>time</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <td>0</td>\n",
" <td>2.160114</td>\n",
" <td>0.250683</td>\n",
" <td>2.032879</td>\n",
" <td>0.300000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>1</td>\n",
" <td>1.989940</td>\n",
" <td>0.354665</td>\n",
" <td>1.869959</td>\n",
" <td>0.366000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>2</td>\n",
" <td>1.877893</td>\n",
" <td>0.426289</td>\n",
" <td>1.784644</td>\n",
" <td>0.406000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>3</td>\n",
" <td>1.791382</td>\n",
" <td>0.483138</td>\n",
" <td>1.628368</td>\n",
" <td>0.476000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>4</td>\n",
" <td>1.721333</td>\n",
" <td>0.522884</td>\n",
" <td>1.475516</td>\n",
" <td>0.604000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>5</td>\n",
" <td>1.669802</td>\n",
" <td>0.556207</td>\n",
" <td>1.429776</td>\n",
" <td>0.606000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>6</td>\n",
" <td>1.626538</td>\n",
" <td>0.582544</td>\n",
" <td>1.373534</td>\n",
" <td>0.638000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>7</td>\n",
" <td>1.583188</td>\n",
" <td>0.607034</td>\n",
" <td>1.312699</td>\n",
" <td>0.646000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>8</td>\n",
" <td>1.546728</td>\n",
" <td>0.623334</td>\n",
" <td>1.335154</td>\n",
" <td>0.642000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>9</td>\n",
" <td>1.527774</td>\n",
" <td>0.634254</td>\n",
" <td>1.317735</td>\n",
" <td>0.650000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>10</td>\n",
" <td>1.511202</td>\n",
" <td>0.647021</td>\n",
" <td>1.262573</td>\n",
" <td>0.664000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>11</td>\n",
" <td>1.492661</td>\n",
" <td>0.658664</td>\n",
" <td>1.351534</td>\n",
" <td>0.650000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>12</td>\n",
" <td>1.480991</td>\n",
" <td>0.662598</td>\n",
" <td>1.238872</td>\n",
" <td>0.688000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>13</td>\n",
" <td>1.458835</td>\n",
" <td>0.676088</td>\n",
" <td>1.241144</td>\n",
" <td>0.670000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>14</td>\n",
" <td>1.447215</td>\n",
" <td>0.679942</td>\n",
" <td>1.218073</td>\n",
" <td>0.698000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>15</td>\n",
" <td>1.443984</td>\n",
" <td>0.683555</td>\n",
" <td>1.311375</td>\n",
" <td>0.644000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>16</td>\n",
" <td>1.419848</td>\n",
" <td>0.692388</td>\n",
" <td>1.219093</td>\n",
" <td>0.686000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>17</td>\n",
" <td>1.431424</td>\n",
" <td>0.691906</td>\n",
" <td>1.184902</td>\n",
" <td>0.712000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>18</td>\n",
" <td>1.425543</td>\n",
" <td>0.693592</td>\n",
" <td>1.184222</td>\n",
" <td>0.710000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>19</td>\n",
" <td>1.400145</td>\n",
" <td>0.704914</td>\n",
" <td>1.195787</td>\n",
" <td>0.716000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>20</td>\n",
" <td>1.411569</td>\n",
" <td>0.700578</td>\n",
" <td>1.242890</td>\n",
" <td>0.676000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>21</td>\n",
" <td>1.403789</td>\n",
" <td>0.702907</td>\n",
" <td>1.250455</td>\n",
" <td>0.684000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>22</td>\n",
" <td>1.390105</td>\n",
" <td>0.710615</td>\n",
" <td>1.243778</td>\n",
" <td>0.700000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>23</td>\n",
" <td>1.384676</td>\n",
" <td>0.712863</td>\n",
" <td>1.258259</td>\n",
" <td>0.702000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>24</td>\n",
" <td>1.380739</td>\n",
" <td>0.720572</td>\n",
" <td>1.229500</td>\n",
" <td>0.678000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>25</td>\n",
" <td>1.384768</td>\n",
" <td>0.707965</td>\n",
" <td>1.229199</td>\n",
" <td>0.680000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>26</td>\n",
" <td>1.368647</td>\n",
" <td>0.718404</td>\n",
" <td>1.266520</td>\n",
" <td>0.688000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>27</td>\n",
" <td>1.363121</td>\n",
" <td>0.727236</td>\n",
" <td>1.273916</td>\n",
" <td>0.668000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>28</td>\n",
" <td>1.355545</td>\n",
" <td>0.733258</td>\n",
" <td>1.206790</td>\n",
" <td>0.700000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>29</td>\n",
" <td>1.353972</td>\n",
" <td>0.730609</td>\n",
" <td>1.328246</td>\n",
" <td>0.660000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>30</td>\n",
" <td>1.348174</td>\n",
" <td>0.735426</td>\n",
" <td>1.171493</td>\n",
" <td>0.718000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>31</td>\n",
" <td>1.330921</td>\n",
" <td>0.742011</td>\n",
" <td>1.156076</td>\n",
" <td>0.720000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>32</td>\n",
" <td>1.334388</td>\n",
" <td>0.746507</td>\n",
" <td>1.191647</td>\n",
" <td>0.680000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>33</td>\n",
" <td>1.313794</td>\n",
" <td>0.749398</td>\n",
" <td>1.184678</td>\n",
" <td>0.704000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>34</td>\n",
" <td>1.318251</td>\n",
" <td>0.747952</td>\n",
" <td>1.149335</td>\n",
" <td>0.720000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>35</td>\n",
" <td>1.318720</td>\n",
" <td>0.753172</td>\n",
" <td>1.223824</td>\n",
" <td>0.676000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>36</td>\n",
" <td>1.295299</td>\n",
" <td>0.762165</td>\n",
" <td>1.195395</td>\n",
" <td>0.686000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>37</td>\n",
" <td>1.296676</td>\n",
" <td>0.763048</td>\n",
" <td>1.173169</td>\n",
" <td>0.724000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>38</td>\n",
" <td>1.288696</td>\n",
" <td>0.769391</td>\n",
" <td>1.108190</td>\n",
" <td>0.746000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>39</td>\n",
" <td>1.287138</td>\n",
" <td>0.761282</td>\n",
" <td>1.158875</td>\n",
" <td>0.722000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>40</td>\n",
" <td>1.278288</td>\n",
" <td>0.770194</td>\n",
" <td>1.131309</td>\n",
" <td>0.724000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>41</td>\n",
" <td>1.260418</td>\n",
" <td>0.780633</td>\n",
" <td>1.149602</td>\n",
" <td>0.750000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>42</td>\n",
" <td>1.267339</td>\n",
" <td>0.781034</td>\n",
" <td>1.086866</td>\n",
" <td>0.758000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>43</td>\n",
" <td>1.246584</td>\n",
" <td>0.790348</td>\n",
" <td>1.071241</td>\n",
" <td>0.764000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>44</td>\n",
" <td>1.245827</td>\n",
" <td>0.785129</td>\n",
" <td>1.022305</td>\n",
" <td>0.804000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>45</td>\n",
" <td>1.252315</td>\n",
" <td>0.788181</td>\n",
" <td>1.098190</td>\n",
" <td>0.746000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>46</td>\n",
" <td>1.235155</td>\n",
" <td>0.795728</td>\n",
" <td>1.059782</td>\n",
" <td>0.776000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>47</td>\n",
" <td>1.229969</td>\n",
" <td>0.797174</td>\n",
" <td>1.080939</td>\n",
" <td>0.754000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>48</td>\n",
" <td>1.212854</td>\n",
" <td>0.808415</td>\n",
" <td>1.004722</td>\n",
" <td>0.802000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>49</td>\n",
" <td>1.205482</td>\n",
" <td>0.807853</td>\n",
" <td>1.019931</td>\n",
" <td>0.786000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>50</td>\n",
" <td>1.208016</td>\n",
" <td>0.811065</td>\n",
" <td>1.021939</td>\n",
" <td>0.788000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>51</td>\n",
" <td>1.188594</td>\n",
" <td>0.819255</td>\n",
" <td>0.953768</td>\n",
" <td>0.808000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>52</td>\n",
" <td>1.168243</td>\n",
" <td>0.829212</td>\n",
" <td>1.094303</td>\n",
" <td>0.762000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>53</td>\n",
" <td>1.168406</td>\n",
" <td>0.831379</td>\n",
" <td>1.007799</td>\n",
" <td>0.810000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>54</td>\n",
" <td>1.148780</td>\n",
" <td>0.837562</td>\n",
" <td>0.985216</td>\n",
" <td>0.784000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>55</td>\n",
" <td>1.148547</td>\n",
" <td>0.839971</td>\n",
" <td>0.954891</td>\n",
" <td>0.812000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>56</td>\n",
" <td>1.146820</td>\n",
" <td>0.841577</td>\n",
" <td>0.954819</td>\n",
" <td>0.818000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>57</td>\n",
" <td>1.118567</td>\n",
" <td>0.852015</td>\n",
" <td>1.007315</td>\n",
" <td>0.806000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>58</td>\n",
" <td>1.113733</td>\n",
" <td>0.859804</td>\n",
" <td>0.990414</td>\n",
" <td>0.814000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>59</td>\n",
" <td>1.103602</td>\n",
" <td>0.859965</td>\n",
" <td>0.994342</td>\n",
" <td>0.804000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>60</td>\n",
" <td>1.094002</td>\n",
" <td>0.866629</td>\n",
" <td>0.979499</td>\n",
" <td>0.804000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>61</td>\n",
" <td>1.081726</td>\n",
" <td>0.875462</td>\n",
" <td>1.019249</td>\n",
" <td>0.804000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>62</td>\n",
" <td>1.080395</td>\n",
" <td>0.872973</td>\n",
" <td>0.934275</td>\n",
" <td>0.828000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>63</td>\n",
" <td>1.074989</td>\n",
" <td>0.881484</td>\n",
" <td>0.946802</td>\n",
" <td>0.830000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>64</td>\n",
" <td>1.054163</td>\n",
" <td>0.885659</td>\n",
" <td>0.941103</td>\n",
" <td>0.826000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>65</td>\n",
" <td>1.054038</td>\n",
" <td>0.886864</td>\n",
" <td>0.932271</td>\n",
" <td>0.844000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>66</td>\n",
" <td>1.042438</td>\n",
" <td>0.894893</td>\n",
" <td>0.944209</td>\n",
" <td>0.832000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>67</td>\n",
" <td>1.026499</td>\n",
" <td>0.901237</td>\n",
" <td>0.940699</td>\n",
" <td>0.826000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>68</td>\n",
" <td>1.014939</td>\n",
" <td>0.909346</td>\n",
" <td>0.907188</td>\n",
" <td>0.846000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>69</td>\n",
" <td>1.010093</td>\n",
" <td>0.911113</td>\n",
" <td>0.911446</td>\n",
" <td>0.840000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>70</td>\n",
" <td>1.003785</td>\n",
" <td>0.914726</td>\n",
" <td>0.925195</td>\n",
" <td>0.834000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>71</td>\n",
" <td>1.002480</td>\n",
" <td>0.915047</td>\n",
" <td>0.914233</td>\n",
" <td>0.852000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>72</td>\n",
" <td>0.994600</td>\n",
" <td>0.920829</td>\n",
" <td>0.947542</td>\n",
" <td>0.826000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>73</td>\n",
" <td>1.001678</td>\n",
" <td>0.917697</td>\n",
" <td>0.937793</td>\n",
" <td>0.828000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>74</td>\n",
" <td>0.991792</td>\n",
" <td>0.923238</td>\n",
" <td>0.919605</td>\n",
" <td>0.830000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>75</td>\n",
" <td>0.990258</td>\n",
" <td>0.922033</td>\n",
" <td>0.916568</td>\n",
" <td>0.838000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>76</td>\n",
" <td>0.984087</td>\n",
" <td>0.925968</td>\n",
" <td>0.914005</td>\n",
" <td>0.842000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>77</td>\n",
" <td>0.981209</td>\n",
" <td>0.928858</td>\n",
" <td>0.921912</td>\n",
" <td>0.836000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" <tr>\n",
" <td>78</td>\n",
" <td>0.990391</td>\n",
" <td>0.924281</td>\n",
" <td>0.925526</td>\n",
" <td>0.842000</td>\n",
" <td>00:19</td>\n",
" </tr>\n",
" <tr>\n",
" <td>79</td>\n",
" <td>0.992413</td>\n",
" <td>0.925486</td>\n",
" <td>0.914091</td>\n",
" <td>0.840000</td>\n",
" <td>00:20</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"learn.fit(80, cbsched)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [default]",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment