Aleksei Petrenko (alex-petrenko)
Public gists
alex-petrenko / ffmpeg_batch.py
Created June 18, 2021 21:35
import os
import sys
import argparse
import subprocess
from os import listdir
from os.path import isfile, join
def main():
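The preview ends at main(). As a minimal sketch of what a batch ffmpeg conversion loop along these lines could look like; the CLI arguments and the plain re-encode command are assumptions, not taken from the gist:

import argparse
import os
import subprocess
import sys
from os import listdir
from os.path import isfile, join

def main():
    # hypothetical CLI; the gist's actual arguments are not shown in the preview
    parser = argparse.ArgumentParser(description='Run ffmpeg over every file in a directory')
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    args = parser.parse_args()

    os.makedirs(args.output_dir, exist_ok=True)
    for name in listdir(args.input_dir):
        src = join(args.input_dir, name)
        if not isfile(src):
            continue
        # plain re-encode with default codecs; -y overwrites existing outputs
        subprocess.run(['ffmpeg', '-y', '-i', src, join(args.output_dir, name)], check=True)
    return 0

if __name__ == '__main__':
    sys.exit(main())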
alex-petrenko / batch_grade.py
Last active May 11, 2021 04:31
Batch grading script
"""
This script clones students' github repos with HW solutions, checks out a commit specified by the student,
and runs corresponding tests, generating a directory with reports.
Just place the script into the root of cloned "tests" repo and change global variables below appropriately.
Then run as "python batch_grade.py"
"""
from typing import Tuple
import torch
import torch.nn as nn
from torch.nn.utils.rnn import PackedSequence, invert_permutation
def _build_pack_info_from_dones(
dones, T: int
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
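The preview stops at the signature. The gist evidently builds the index tensors needed to construct a PackedSequence directly from episode-termination flags; as an illustrative stand-in (not the gist's implementation), here is a sketch that splits a flat rollout at done flags and packs the resulting episodes with pack_sequence:

import torch
from torch.nn.utils.rnn import pack_sequence

def split_rollout_by_dones(x: torch.Tensor, dones: torch.Tensor):
    # indices one past each terminal step mark the episode boundaries
    boundaries = (dones.nonzero(as_tuple=True)[0] + 1).tolist()
    starts = [0] + boundaries
    ends = boundaries + [x.shape[0]]
    episodes = [x[s:e] for s, e in zip(starts, ends) if e > s]
    # pack_sequence with the default enforce_sorted=True needs decreasing lengths
    episodes.sort(key=len, reverse=True)
    return pack_sequence(episodes)

# Example: a rollout of T=10 one-dimensional features with dones at steps 3 and 7
feats = torch.arange(10, dtype=torch.float32).unsqueeze(-1)
dones = torch.zeros(10, dtype=torch.bool)
dones[3] = dones[7] = True
packed = split_rollout_by_dones(feats, dones)  # episodes of lengths 4, 4, 2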
#include <Corrade/Containers/GrowableArray.h>
#include <Corrade/Containers/Optional.h>
#include <Magnum/GL/Buffer.h>
#include <Magnum/GL/DefaultFramebuffer.h>
#include <Magnum/GL/Mesh.h>
#include <Magnum/GL/Renderer.h>
#include <Magnum/Math/Color.h>
#include <Magnum/Math/Matrix4.h>
#include <Magnum/MeshTools/Interleave.h>
class VizdoomEnvMultiplayer(VizdoomEnv):
def __init__(self, level, player_id, num_players, skip_frames, level_map='map01'):
super().__init__(level, skip_frames=skip_frames, level_map=level_map)
self.player_id = player_id
self.num_players = num_players
self.timestep = 0
self.update_state = True
    def _is_server(self):
        # assumed completion, not from the preview: player 0 acts as the host
        return self.player_id == 0
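Hypothetical usage of the wrapper above: one instance per player, with player 0 assumed to act as the host. The config name and constructor values are placeholders, not taken from the gist:

envs = [
    VizdoomEnvMultiplayer('cig.cfg', player_id=i, num_players=2, skip_frames=4)
    for i in range(2)
]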
alex-petrenko / dmlab_gym_with_cache.py
Created April 8, 2019 02:13
import os
import shutil
import time
from os.path import join
import cv2
import deepmind_lab
import gym
import numpy as np
from gym.utils import seeding
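The file name suggests a gym wrapper around deepmind_lab with a level cache. deepmind_lab.Lab accepts a level_cache object exposing fetch(key, pk3_path) and write(key, pk3_path); a minimal on-disk version of the caching part could look like the sketch below. The cache directory, level name, and config values are placeholders, and the whole thing is an assumption about what the gist does, not its actual code:

import os
import shutil
from os.path import isfile, join

import deepmind_lab

class LocalLevelCache:
    """Stores compiled level pk3 files on disk, keyed by the cache key
    deepmind_lab provides (assumed to be filename-safe here)."""

    def __init__(self, cache_dir='/tmp/dmlab_cache'):
        self.cache_dir = cache_dir
        os.makedirs(cache_dir, exist_ok=True)

    def fetch(self, key, pk3_path):
        path = join(self.cache_dir, key)
        if isfile(path):
            # cache hit: copy the stored level into the requested location
            shutil.copyfile(path, pk3_path)
            return True
        return False

    def write(self, key, pk3_path):
        path = join(self.cache_dir, key)
        if not isfile(path):
            shutil.copyfile(pk3_path, path)

env = deepmind_lab.Lab(
    'contributed/dmlab30/explore_goal_locations_small',
    ['RGB_INTERLEAVED'],
    config={'width': '96', 'height': '72'},
    level_cache=LocalLevelCache(),
)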
import sys
import math
import random
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
def encoder(observation):
with tf.variable_scope('encoder', reuse=tf.AUTO_REUSE):
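The preview cuts off inside the variable scope. A sketch of how a TF1-style convolutional encoder might continue; the layer sizes follow the common Atari architecture and are assumptions, not taken from the gist:

import tensorflow as tf

def encoder(observation):
    # AUTO_REUSE lets the same weights be shared across multiple calls
    with tf.variable_scope('encoder', reuse=tf.AUTO_REUSE):
        x = tf.layers.conv2d(observation, 32, 8, strides=4, activation=tf.nn.relu)
        x = tf.layers.conv2d(x, 64, 4, strides=2, activation=tf.nn.relu)
        x = tf.layers.conv2d(x, 64, 3, strides=1, activation=tf.nn.relu)
        x = tf.layers.flatten(x)
        return tf.layers.dense(x, 256, activation=tf.nn.relu)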