dddzg
  • SCUT
  • Canton, China
jeasinema / weight_init.py
Last active May 25, 2023 09:32
A simple script for parameter initialization for PyTorch
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
import torch
import torch.nn as nn
import torch.nn.init as init
def weight_init(m):
    '''Initialize parameters by layer type; usage: model.apply(weight_init).'''
    # the listing truncates here; a minimal sketch of the per-layer dispatch
    if isinstance(m, (nn.Linear, nn.Conv2d)):
        init.xavier_normal_(m.weight)
        if m.bias is not None:
            init.zeros_(m.bias)
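
A minimal usage sketch (the two-layer model below is hypothetical; the entry point is nn.Module.apply):

import torch.nn as nn

model = nn.Sequential(nn.Linear(10, 32), nn.ReLU(), nn.Linear(32, 2))
model.apply(weight_init)  # .apply() calls weight_init on every submodule recursively
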
yzh119 / st-gumbel.py
Created January 12, 2018 12:25
ST-Gumbel-Softmax-Pytorch
from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
def sample_gumbel(shape, eps=1e-20):
    # sample standard Gumbel noise via inverse-CDF: G = -log(-log(U)), U ~ Uniform(0, 1);
    # eps guards both logs against log(0)
    U = torch.rand(shape).cuda()
    return -Variable(torch.log(-torch.log(U + eps) + eps))
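
The listing cuts off after sample_gumbel; below is a sketch of the usual remaining steps (the names gumbel_softmax_sample and gumbel_softmax are assumptions, following the standard straight-through formulation: one-hot argmax in the forward pass, soft-sample gradients in the backward pass):

def gumbel_softmax_sample(logits, temperature):
    # perturb logits with Gumbel noise, then take a temperature-scaled softmax
    y = logits + sample_gumbel(logits.size())
    return F.softmax(y / temperature, dim=-1)

def gumbel_softmax(logits, temperature, hard=False):
    # logits must live on the GPU, since sample_gumbel above calls .cuda()
    y = gumbel_softmax_sample(logits, temperature)
    if hard:
        # straight-through estimator: forward uses the one-hot argmax,
        # gradients flow through the soft sample y
        y_hard = torch.zeros_like(y).scatter_(-1, y.argmax(dim=-1, keepdim=True), 1.0)
        y = (y_hard - y).detach() + y
    return y
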
kaniblu / rnn_init.py
Created October 26, 2017 05:14
PyTorch LSTM and GRU Orthogonal Initialization and Positive Bias
import torch.nn.init as I
def init_gru(cell, gain=1):
    cell.reset_parameters()
    # orthogonal initialization of the recurrent (hidden-to-hidden) weights, one gate at a time
    for _, hh, _, _ in cell.all_weights:
        for i in range(0, hh.size(0), cell.hidden_size):
            I.orthogonal_(hh[i:i + cell.hidden_size], gain=gain)
def init_lstm(cell, gain=1):
    init_gru(cell, gain)
    # the listing truncates here; a sketch of the positive forget-gate bias (LSTM gate order is i, f, g, o)
    for _, _, b_ih, b_hh in cell.all_weights:
        n = b_ih.size(0)
        b_ih.data[n // 4:n // 2].fill_(1.0)
        b_hh.data[n // 4:n // 2].fill_(1.0)
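
A usage sketch, assuming a standard nn.LSTM (which exposes the all_weights and hidden_size attributes the initializers rely on):

import torch.nn as nn

lstm = nn.LSTM(input_size=128, hidden_size=256, num_layers=2)
init_lstm(lstm)  # orthogonal recurrent weights, forget-gate biases set to 1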