Skip to content

Instantly share code, notes, and snippets.

View lostella's full-sized avatar
🌴
On vacation

Lorenzo Stella lostella

🌴
On vacation
View GitHub Profile

Keybase proof

I hereby claim:

  • I am lostella on github.
  • I am lostella (https://keybase.io/lostella) on keybase.
  • I have a public key whose fingerprint is FBAB AFA4 81E5 F7BA E3D7 9DD3 ACE0 41D6 6164 F46C

To claim this, I am signing this object:

Keybase proof

I hereby claim:

  • I am lostella on github.
  • I am lostella (https://keybase.io/lostella) on keybase.
  • I have a public key ASAiQnm-kLkopRgiLCGDKC5w5ZcA-ehLSCEZAp-HYjaxLAo

To claim this, I am signing this object:

import mxnet as mx
class MyBlock(mx.gluon.HybridBlock):
    """Hybrid block holding a stack of three 20-unit LSTM cells."""

    def __init__(self):
        super().__init__()
        with self.name_scope():
            # Build the sequential container first, then register it as a child.
            stacked = mx.gluon.rnn.HybridSequentialRNNCell()
            for _ in range(3):
                stacked.add(mx.gluon.rnn.LSTMCell(hidden_size=20))
            self.lstm = stacked
import mxnet as mx
class MyBlock(mx.gluon.HybridBlock):
    """Hybrid block that unrolls a single LSTM cell over a fixed-length sequence."""

    def __init__(self):
        super().__init__()
        with self.name_scope():
            # One LSTM cell with 20 hidden units; unrolled manually in hybrid_forward.
            self.lstmcell = mx.gluon.rnn.LSTMCell(hidden_size=20)

    def hybrid_forward(self, F, seq):
        # Unroll the cell over 10 time steps; seq is laid out batch-major
        # ("NTC" = batch, time, channel) and outputs are merged into one tensor.
        # NOTE(review): no return statement is visible — this gist appears
        # truncated here; presumably `outputs` should be returned. Confirm
        # against the full gist before relying on this block.
        outputs, state = self.lstmcell.unroll(inputs=seq, length=10, layout="NTC", merge_outputs=True)
from pathlib import Path
from typing import List
class IndexedFile:
    """Wraps a file on disk together with an index of offsets into it.

    NOTE(review): `_build_index` is not visible in this excerpt (the gist is
    truncated); presumably it populates `self.offset` with byte offsets of
    the file's records — confirm against the full gist.
    """

    def __init__(self, path: Path) -> None:
        # Path of the file being indexed.
        self.path = path
        # Offsets into the file, filled in by _build_index (defined elsewhere).
        self.offset: List[int] = []
        self._build_index()
@lostella
lostella / loader_example.py
Created September 7, 2020 11:16
GluonTS data loading example
import time
from functools import partial
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import mxnet as mx
import torch
@lostella
lostella / loader_sanity_check.py
Created September 7, 2020 11:17
GluonTS data loader sanity check
import time
import mxnet as mx
from gluonts.dataset.repository.datasets import get_dataset
from gluonts.dataset.loader import TrainDataLoader
from gluonts.model.deepar import DeepAREstimator
from gluonts.mx.batchify import batchify as mx_batchify
# Fetch (downloading on first use) the "electricity" benchmark dataset
# from the GluonTS dataset repository.
dataset = get_dataset("electricity")
@lostella
lostella / gluonts_datasets_sanity_check.py
Created May 4, 2021 13:56
Sanity check on GluonTS provided datasets
from gluonts.dataset.repository.datasets import get_dataset, dataset_names
def check_train_test_split(dataset):
    """Sanity-check the train split of a GluonTS-style dataset.

    Walks ``dataset.train`` and records, per ``item_id``, the timestamp at
    which the training series ends, asserting along the way that no
    ``item_id`` occurs more than once.

    Args:
        dataset: object exposing ``metadata.prediction_length`` and an
            iterable ``train`` of entries with ``item_id``, ``start`` (a
            period/timestamp with a ``.freq`` attribute) and ``target`` keys.

    Raises:
        AssertionError: if a duplicate ``item_id`` is found in the train split.
    """
    # NOTE(review): unused in this excerpt — presumably consumed by the
    # test-split checks in the (truncated) remainder of the gist.
    prediction_length = dataset.metadata.prediction_length
    train_end = {}
    for entry in dataset.train:
        item_id = entry["item_id"]
        # Bug fix: the original message was f"item {k} is duplicate", but `k`
        # is undefined — a duplicate would raise NameError instead of a
        # readable AssertionError.
        assert item_id not in train_end, f"item {item_id} is duplicate"
        # End of the series = start shifted by its length in units of freq.
        train_end[item_id] = entry["start"] + len(entry["target"]) * entry["start"].freq
@lostella
lostella / vimrc
Last active December 8, 2022 12:34
" Minimal vimrc: syntax highlighting, indentation, search, and UI tweaks.
syntax on
filetype plugin indent on
" Indentation: 4-column soft tabs expanded to spaces, auto-indent new lines.
set tabstop=4 softtabstop=4 shiftwidth=4 expandtab smarttab autoindent
" Search: incremental, case-insensitive unless the pattern has uppercase.
set incsearch ignorecase smartcase hlsearch
" Command-line completion: longest common prefix first, then list, then full.
set wildmode=longest,list,full wildmenu
" Status line: always visible, with ruler, pending command, and mode.
set ruler laststatus=2 showcmd showmode
" Make trailing whitespace and tabs visible.
set list listchars=trail:»,tab:»-
" Use a blank character for vertical split separators (escaped trailing space).
set fillchars+=vert:\
" Soft-wrap long lines, preserving indentation on wrapped portions.
set wrap breakindent
set encoding=utf-8
@lostella
lostella / load_m5_gluonts.py
Last active April 24, 2024 05:47
Loading M5 competition data into a gluonts PandasDataset
# Works on gluonts dev branch as of May 30th, 2023
# Assumes "m5-forecasting-accuracy" folder with data next to the script
# Data is obtained from https://www.kaggle.com/c/m5-forecasting-accuracy
import pandas as pd
from pathlib import Path
from gluonts.dataset.pandas import PandasDataset
# Load data from csv files