vaibkumr / Fixed point problem.ipynb
Created January 6, 2020 20:56
Fixed point problem
import torch
# Quantize a float32 tensor to 8-bit unsigned ints with scale 0.1 and zero point 10:
# stored value = round(x / scale + zero_point)
t = torch.tensor([1.111111111])
t_q = torch.quantize_per_tensor(t, 0.1, 10, torch.quint8)
print(t_q.int_repr())
# output: tensor([21], dtype=torch.uint8)
print(t_q)
# output: tensor([1.1000], size=(1,), dtype=torch.quint8, quantization_scheme=torch.per_tensor_affine, scale=0.1, zero_point=10)
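Anything finer than the 0.1 scale is lost in the round trip, which is presumably the fixed point problem the title refers to; a minimal sketch of dequantizing the same tensor back to float with the same scale and zero point:
import torch
t = torch.tensor([1.111111111])
t_q = torch.quantize_per_tensor(t, 0.1, 10, torch.quint8)
# Dequantize: (stored value - zero_point) * scale = (21 - 10) * 0.1
t_back = t_q.dequantize()
print(t_back)      # tensor([1.1000])
print(t - t_back)  # quantization error of roughly 0.0111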
import torch
# Binary operator on named tensors
t1 = torch.randn(4, names=('X',))  # note the trailing comma: names must be a sequence of strings
t2 = torch.randn(4)                # unnamed tensor; its dim matches any name
t3 = t1 * t2
t3.names #output: ('X',)
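An unnamed tensor can also be given names explicitly; a small sketch using refine_names, which lifts unnamed (None) dims to the given names:
import torch
t2 = torch.randn(4)
t2_named = t2.refine_names('X')  # None -> 'X'
print(t2_named.names)  # output: ('X',)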
import torch
# Unary operator on named tensors
t = torch.randn(4, 2, names=('N', 'C'))
t = t.abs()
t.names #output: ('N', 'C')
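Named dimensions can also be referred to by name in reductions; a short sketch, assuming the same ('N', 'C') tensor as above:
import torch
t = torch.randn(4, 2, names=('N', 'C'))
s = t.sum('C')  # reduce over the channel dimension by name, not by position
print(s.names)  # output: ('N',)
print(s.shape)  # output: torch.Size([4])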
import torch
batch1 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
batch2 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
batch3 = torch.zeros(64, 3, 100, 100)                              # unnamed
batch4 = torch.zeros(64, 3, 100, 100)                              # unnamed
batch5 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'W', 'H'))  # different name order
# The name tuples of batch1 and batch2 are equal, so the names match and addition succeeds
res1 = batch1 + batch2
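For the unnamed tensors above (batch3 and batch4), name inference treats unnamed dims as wildcards, so adding one to a named tensor keeps the names; a small self-contained sketch:
import torch
batch1 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
batch3 = torch.zeros(64, 3, 100, 100)  # unnamed
res2 = batch1 + batch3
print(res2.names)  # output: ('N', 'C', 'H', 'W')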
import torch
# The following produces no error because the dimension names match
batch1 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
batch2 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
batch3 = batch1 + batch2
# The following produces an error because the dimension names don't match
batch1 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
batch2 = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'W', 'H'))
batch3 = batch1 + batch2  # raises RuntimeError: the dims do not match
import torch
batch = torch.zeros(64, 3, 100, 100, names=('N', 'C', 'H', 'W'))
print(batch.shape) #torch.Size([64, 3, 100, 100])
batch = batch.align_to('N', 'H', 'W', 'C')
print(batch.shape) #torch.Size([64, 100, 100, 3])
#####################
# Before PyTorch v1.3
#####################
import torch
batch = torch.zeros(2, 3, 2, 2, 2, 2, 2, 2, 2, 2)
print(batch.shape)  # torch.Size([2, 3, 2, 2, 2, 2, 2, 2, 2, 2])
# Swapping two dimensions means spelling out the position of every other dimension
batch = batch.permute([0, 2, 1, 3, 4, 5, 6, 7, 8, 9])
print(batch.shape)  # torch.Size([2, 2, 3, 2, 2, 2, 2, 2, 2, 2])
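For contrast, the same two-dimension swap with named tensors via align_to; the dimension names here are made up for the sketch:
import torch
names = ('A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J')
batch = torch.zeros(2, 3, 2, 2, 2, 2, 2, 2, 2, 2, names=names)
# Swap 'B' and 'C' by spelling out the target name order instead of positional indices
batch = batch.align_to('A', 'C', 'B', 'D', 'E', 'F', 'G', 'H', 'I', 'J')
print(batch.shape)  # torch.Size([2, 2, 3, 2, 2, 2, 2, 2, 2, 2])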