Christos Baziotis (cbaziotis)

@BasPH
BasPH / black_selection.sh
Created December 11, 2018 19:53
Black on selection
#!/usr/bin/env bash
set -x
black=$1
input_file=$2
start_line=$3
end_line=$4
# Read selected lines and write to tmpfile
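For illustration, a minimal Python sketch of the same idea, not part of the gist: pull the selected line range out of the file, format just that slice with black, and splice the result back. The argument order mirrors the bash script above.

import subprocess
import sys
import tempfile

# Hypothetical re-implementation sketch; arguments mirror the bash script.
black, input_file, start_line, end_line = sys.argv[1:5]
start, end = int(start_line), int(end_line)

with open(input_file) as f:
    lines = f.readlines()

# Write only the selected lines to a temporary file.
with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tmp:
    tmp.writelines(lines[start - 1:end])
    tmp_path = tmp.name

# Run black on the temporary file only, i.e. on the selection.
subprocess.run([black, "--quiet", tmp_path], check=True)

with open(tmp_path) as tmp:
    formatted = tmp.readlines()

# Splice the formatted slice back into the original file.
with open(input_file, "w") as f:
    f.writelines(lines[:start - 1] + formatted + lines[end:])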
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import torch.nn.functional as F
import matplotlib.pyplot as plt
import numpy as np
@JinhaiZ
JinhaiZ / connection.py
Created November 19, 2017 16:39
connect to MongoDB via ssh tunnel
from sshtunnel import SSHTunnelForwarder
import pymongo
MONGO_HOST = "IP_ADDRESS"
MONGO_USER = "USERNAME"
MONGO_PASS = "PASSWORD"
MONGO_DB = "DATABASE_NAME"
MONGO_COLLECTION = "COLLECTION_NAME"
# define ssh tunnel
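A minimal sketch of how such a tunnel is typically defined with sshtunnel and then used from pymongo; the SSH credentials and placeholder constants below mirror the gist's snippet but are assumptions, not its actual code.

from sshtunnel import SSHTunnelForwarder
import pymongo

# Placeholder values mirroring the gist's constants above.
MONGO_HOST = "IP_ADDRESS"          # remote server to SSH into
MONGO_DB = "DATABASE_NAME"
MONGO_COLLECTION = "COLLECTION_NAME"

server = SSHTunnelForwarder(
    MONGO_HOST,
    ssh_username="SSH_USERNAME",
    ssh_password="SSH_PASSWORD",
    remote_bind_address=("127.0.0.1", 27017),  # MongoDB port on the remote host
)
server.start()

# Connect through the local end of the tunnel.
client = pymongo.MongoClient("127.0.0.1", server.local_bind_port)
collection = client[MONGO_DB][MONGO_COLLECTION]
print(collection.find_one())

server.stop()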
@georgepar
georgepar / ipynb2pdf.py
Created October 29, 2017 19:43
A script to convert an IPython notebook to PDF with support for the Greek language
#!/usr/bin/env python3
"""
Credits https://github.com/ivanychev/learning/blob/master/Python/ipynb2pdf/ipynb2pdf.py
The current version of Jupyter does not support PDF export for documents that
contain Greek text; this script was written to fix that. It requires both
nbconvert and jupyter to be installed.
Author: Sergey Ivanychev
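A minimal sketch of the general approach, assuming jupyter nbconvert and a TeX distribution with xelatex are on the PATH; the gist itself does more work to render Greek correctly, which this sketch only approximates by compiling with a Unicode-aware engine.

import subprocess
import sys

notebook = sys.argv[1]                          # e.g. report.ipynb
tex_file = notebook.rsplit(".", 1)[0] + ".tex"

# 1. Export to LaTeX instead of going straight to PDF.
subprocess.run(["jupyter", "nbconvert", "--to", "latex", notebook], check=True)

# 2. Compile with xelatex, which handles Unicode (Greek) text natively.
subprocess.run(["xelatex", "-interaction=nonstopmode", tex_file], check=True)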
@njellinas
njellinas / autoencoder_extra.py
Created October 26, 2017 14:46
Two Keras layer-class definitions: one implementing weight tying and one for loading pretrained weights in deep autoencoders
import keras.backend as K
from keras.layers import Layer
from keras.legacy import interfaces
from keras.engine import InputSpec
from keras import activations, initializers, regularizers, constraints
class DenseTransposeTied(Layer):
@interfaces.legacy_dense_support
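The listing cuts off at the legacy-support decorator. As a rough illustration of the weight-tying idea only, and not the gist's actual DenseTransposeTied implementation, a tied decoder layer can reuse the transpose of an encoder Dense layer's kernel and learn only its own bias:

from keras import activations
from keras import backend as K
from keras.layers import Dense, Layer


class DenseTied(Layer):
    # Sketch of a dense layer whose kernel is the transpose of tied_to.kernel.
    def __init__(self, units, tied_to, activation=None, **kwargs):
        super(DenseTied, self).__init__(**kwargs)
        self.units = units
        self.tied_to = tied_to  # the encoder Dense layer to share weights with
        self.activation = activations.get(activation)

    def build(self, input_shape):
        # Only the bias is a new trainable weight; the kernel is borrowed
        # (transposed) from the tied encoder layer, which must already be built.
        self.bias = self.add_weight(name="bias", shape=(self.units,),
                                    initializer="zeros")
        super(DenseTied, self).build(input_shape)

    def call(self, inputs):
        output = K.bias_add(K.dot(inputs, K.transpose(self.tied_to.kernel)),
                            self.bias)
        return self.activation(output)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.units)

In an autoencoder, the decoder would then be built as, e.g., DenseTied(input_dim, tied_to=encoder_dense), roughly halving the number of dense parameters.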
@fchollet
fchollet / new_stacked_rnns.py
Last active August 13, 2019 15:23
New stacked RNNs in Keras
import keras
import numpy as np
timesteps = 60
input_dim = 64
samples = 10000
batch_size = 128
output_dim = 64
# Test data.
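For context, the feature this gist appears to demonstrate is that a single keras.layers.RNN wrapper can take a list of cells and stack them internally, instead of chaining several recurrent layers with return_sequences=True. A minimal sketch reusing the dimensions above (not the gist's full script):

import numpy as np
import keras

timesteps, input_dim, output_dim, samples = 60, 64, 64, 10000

# Three LSTM cells wrapped by one RNN layer form a stacked (deep) RNN.
cells = [keras.layers.LSTMCell(output_dim) for _ in range(3)]
inputs = keras.layers.Input(shape=(timesteps, input_dim))
outputs = keras.layers.RNN(cells)(inputs)

model = keras.models.Model(inputs, outputs)
model.compile(optimizer="adam", loss="mse")
model.fit(np.random.random((samples, timesteps, input_dim)),
          np.random.random((samples, output_dim)),
          batch_size=128, epochs=1)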
# coding: utf-8
import logging
import re
from collections import Counter
import numpy as np
import torch
from sklearn.datasets import fetch_20newsgroups
from torch.autograd import Variable
@jihunchoi
jihunchoi / masked_cross_entropy.py
Last active January 22, 2024 19:20
PyTorch workaround for masking cross entropy loss
import torch
from torch.autograd import Variable


def _sequence_mask(sequence_length, max_len=None):
    # Build a (batch_size, max_len) boolean mask that is True at real token
    # positions and False at padding positions.
    if max_len is None:
        max_len = sequence_length.data.max()
    batch_size = sequence_length.size(0)
    seq_range = torch.arange(0, max_len).long()  # torch.range is deprecated
    seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len)
    seq_range_expand = Variable(seq_range_expand)
    if sequence_length.is_cuda:
        seq_range_expand = seq_range_expand.cuda()
    seq_length_expand = (sequence_length.unsqueeze(1)
                         .expand_as(seq_range_expand))
    return seq_range_expand < seq_length_expand
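A minimal sketch of how such a mask is typically applied to get a length-aware loss; the function name and shapes below are assumptions for illustration, not the gist's exact masked_cross_entropy:

import torch.nn.functional as F

def masked_cross_entropy_sketch(logits, target, length):
    # logits: (batch, max_len, num_classes), target: (batch, max_len),
    # length: (batch,) true sequence lengths.
    log_probs = F.log_softmax(logits, dim=-1)
    # Negative log-likelihood of the target token at every position.
    nll = -log_probs.gather(dim=-1, index=target.unsqueeze(-1)).squeeze(-1)
    # Zero out the padded positions, then average over real tokens only.
    mask = _sequence_mask(length, max_len=target.size(1)).float()
    return (nll * mask).sum() / length.float().sum()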
from keras.engine.topology import Layer
from keras import initializations
from keras import backend as K
class Attention(Layer):
'''Attention operation for temporal data.
# Input shape
3D tensor with shape: `(samples, steps, features)`.
# Output shape
2D tensor with shape: `(samples, features)`.
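As a rough illustration of what a layer with this interface computes, a plain NumPy sketch; the scoring vector w stands in for the layer's learned weights and is an assumption, not part of the gist:

import numpy as np

def temporal_attention(x, w):
    # x: (samples, steps, features), w: (features,) scoring vector.
    scores = np.tanh(x @ w)                        # (samples, steps)
    weights = np.exp(scores)
    weights /= weights.sum(axis=1, keepdims=True)  # softmax over the time axis
    # Attention-weighted sum over timesteps -> (samples, features).
    return (x * weights[..., None]).sum(axis=1)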
@MaximumEntropy
MaximumEntropy / moses_tokenizer.py
Last active November 11, 2017 10:23
Simple Python interface to the Moses tokenizer
import subprocess
import sys

tokenizer_path = sys.argv[1]  # Path to the moses tokenizer mosesdecoder/scripts/tokenizer.perl
text = sys.argv[2]            # Text to be tokenized
lang = sys.argv[3]            # Input language, e.g. en, fr, de

# tokenizer.perl reads its input from stdin, so the text is not passed as an argument.
pipe = subprocess.Popen(["perl", tokenizer_path, "-l", lang],
                        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
pipe.stdin.write(text.encode("utf-8"))
pipe.stdin.close()
tokenized = pipe.stdout.read().decode("utf-8").strip()
print(tokenized)
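An alternative sketch wrapping the call in a helper with subprocess.run, which handles the pipe plumbing and avoids deadlocks on large inputs; the name moses_tokenize is introduced here and is not part of the gist:

import subprocess

def moses_tokenize(text, tokenizer_path, lang="en"):
    # Feed the text on stdin and return the tokenized output as a list of tokens.
    result = subprocess.run(["perl", tokenizer_path, "-l", lang],
                            input=text.encode("utf-8"),
                            stdout=subprocess.PIPE, check=True)
    return result.stdout.decode("utf-8").strip().split()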