from IPython.display import HTML, Image
from google.colab.output import eval_js
from base64 import b64decode

# set up the drawing canvas
canvas_html = """
<canvas width=%d height=%d></canvas>
<button>Done!</button>
<script>
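The preview above cuts off inside canvas_html. As a hedged sketch (not part of the original gist), the drawing is typically returned to Python as a base64 data URL; the promise name `data` below is an assumption about what the JavaScript exposes.

# Hypothetical helper (assumes the JS in canvas_html resolves a promise named `data`
# with a "data:image/png;base64,..." URL once the Done! button is clicked).
def save_canvas_drawing(filename="drawing.png"):
    data_url = eval_js("data")            # blocks until the promise resolves
    _, encoded = data_url.split(",", 1)   # strip the data-URL header
    with open(filename, "wb") as f:
        f.write(b64decode(encoded))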
##########################################################################################
import torch
import torch.nn.functional as F
import pytorch_lightning as pl
##########################################################################################
class FlashModel(pl.LightningModule):
    def __init__(self, model):
        super().__init__()
        self.model = model   # wrap an existing torch.nn.Module
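A brief usage sketch for the wrapper above; the backbone network and its sizes are illustrative assumptions, not from the gist, and a full training run would also need a training_step and configure_optimizers.

backbone = torch.nn.Sequential(            # any torch.nn.Module can be wrapped
    torch.nn.Linear(28 * 28, 128),
    torch.nn.ReLU(),
    torch.nn.Linear(128, 10),
)
flash_model = FlashModel(backbone)
# trainer = pl.Trainer(max_epochs=1)       # fit() additionally requires a training_step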
imflash217 / gist_to_github_repo.md
Created January 14, 2021 23:56 — forked from ishu3101/gist_to_github_repo.md
Transfer a gist to a GitHub repository

clone the gist

git clone https://gist.github.com/ishu3101/6fb35afd237e42ef25f9

rename the directory

mv 6fb35afd237e42ef25f9 ConvertTo-Markdown

change the working directory to the newly renamed directory

cd ConvertTo-Markdown

import torch
import torch.nn.functional as F
import pytorch_lightning as pl
##########################################################################################
class FlashModel(pl.LightningModule):
    """DOCSTRING"""
    def __init__(self, model):
        super().__init__()
import torch
import torch.nn.functional as F
import pytorch_lightning as pl
###########################################################################################
## Pytorch_Lightning version
##
class FlashModel(pl.LightningModule):
    """DOCSTRING"""
    def __init__(self, model):
        super().__init__()
import os
import torch
import torch.nn.functional as F
from torchvision import datasets, transforms
from torch.utils.data import DataLoader
import pytorch_lightning as pl
###########################################################################################
# A LightningModule ORGANIZES the PyTorch code into the following parts (see the sketch after this comment block):
# 1. Computations (init)
# 2. Training loop (training_step)
# 3. Validation loop (validation_step)
# 4. Test loop (test_step)
# 5. Optimizers (configure_optimizers)
##############################################################################
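# A minimal sketch (assumed, not taken from the gist) that shows all five parts in one
# class; the layer sizes and loss are illustrative only.
class FlashModel(pl.LightningModule):
    def __init__(self):                                       # 1. computations
        super().__init__()
        self.net = torch.nn.Sequential(
            torch.nn.Flatten(),
            torch.nn.Linear(28 * 28, 128),
            torch.nn.ReLU(),
            torch.nn.Linear(128, 10),
        )

    def training_step(self, batch, batch_idx):                # 2. training loop
        x, y = batch
        return F.cross_entropy(self.net(x), y)

    def validation_step(self, batch, batch_idx):              # 3. validation loop
        x, y = batch
        self.log("val_loss", F.cross_entropy(self.net(x), y))

    def test_step(self, batch, batch_idx):                    # 4. test loop
        x, y = batch
        self.log("test_loss", F.cross_entropy(self.net(x), y))

    def configure_optimizers(self):                           # 5. optimizers
        return torch.optim.Adam(self.parameters(), lr=1e-3)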
model = FlashModel()
trainer = pl.Trainer()   # Trainer comes from the pytorch_lightning (pl) namespace
### DATALOADERS ##################################################################
# When building DataLoaders, set `num_workers > 0` and `pin_memory=True`.
DataLoader(dataset, num_workers=8, pin_memory=True)
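# Hypothetical end-to-end wiring of the pieces above; the MNIST dataset and ToTensor
# transform are illustrative assumptions, not part of the gist.
train_ds = datasets.MNIST("data/", train=True, download=True, transform=transforms.ToTensor())
train_loader = DataLoader(train_ds, batch_size=64, shuffle=True, num_workers=8, pin_memory=True)
trainer.fit(model, train_loader)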
### num_workers ##################################################################
# num_workers depends on the batch size and the machine
# A general place to start is to set num_workers = number of CPUs in the machine.
# Increasing num_workers also increases CPU usage.
# BEST TIP: Increase num_workers slowly and stop when there is no performance increase.
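# One way to apply the two tips above (an assumption, not from the gist): start from the
# machine's CPU count and tune from there.
DataLoader(dataset, num_workers=os.cpu_count() or 1, pin_memory=True)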
imflash217 / FlashModel.py
Last active January 13, 2021 20:51
PyTorch Lightning Model
import torch as pt
import pytorch_lightning as pl
#######################################################################
class FlashModel(pl.LightningModule):
    """This defines a MODEL"""
    def __init__(self, num_layers: int = 3):
        super().__init__()
        # NOTE: nn.Linear requires in_features and out_features arguments;
        # they are left unspecified in this stub.
        self.layer1 = pt.nn.Linear()
        self.layer2 = pt.nn.Linear()
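A runnable variant of the stub above, kept as a hedged sketch: the layer sizes and the forward pass are assumptions, and num_layers is accepted but unused, exactly as in the stub.

class FlashModelExample(pl.LightningModule):
    """Same shape as FlashModel above, with placeholder sizes so nn.Linear is valid."""
    def __init__(self, num_layers: int = 3):
        super().__init__()
        self.layer1 = pt.nn.Linear(28 * 28, 128)   # placeholder in/out features
        self.layer2 = pt.nn.Linear(128, 10)

    def forward(self, x):
        return self.layer2(pt.relu(self.layer1(x)))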