Skip to content

Instantly share code, notes, and snippets.

"""
Space efficient save and load of depth maps
"""
from ctypes import c_ubyte, c_float
import numpy as np
from imageio.v3 import imread, imwrite
def depth_encode(dmap):
@eldar
eldar / log.txt
Last active December 14, 2022 10:18
Torch Dynamo bug
Running DDP training on rank 1.
Running DDP training on rank 0.
0%| | 0/1000 [00:00<?, ?it/s]
0%| | 0/1000 [00:00<?, ?it/s][2022-12-14 10:11:05,736] torch._dynamo.eval_frame: [DEBUG] skipping __init__ /users/eldar/apps/anaconda3/envs/ca3d/lib/python3.10/contextlib.py
[2022-12-14 10:11:05,736] torch._dynamo.eval_frame: [DEBUG] skipping __enter__ /users/eldar/apps/anaconda3/envs/ca3d/lib/python3.10/contextlib.py
[2022-12-14 10:11:05,736] torch._dynamo.eval_frame: [DEBUG] skipping __init__ /users/eldar/apps/anaconda3/envs/ca3d/lib/python3.10/contextlib.py
[2022-12-14 10:11:05,736] torch._dynamo.eval_frame: [DEBUG] skipping __enter__ /users/eldar/apps/anaconda3/envs/ca3d/lib/python3.10/contextlib.py
[2022-12-14 10:11:05,736] torch._dynamo.eval_frame: [DEBUG] skipping enable_dynamic /users/eldar/apps/anaconda3/envs/ca3d/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py
[2022-12-14 10:11:05,736] torch._dynamo.eval_frame: [DEBUG] skipping forward /users/eldar/apps/anaconda3/envs/ca3d/li
Running DDP training on rank 1.
Running DDP training on rank 0.
r.device before call: cuda:1 rank: 1
r.device inside call: cuda:0 rank: 1
[W reducer.cpp:1298] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
Traceback (most recent call last):
File "/users/eldar/src/hybridrf/train_dynamo_bug.py", line 77, in <module> mp.spawn(train,
@eldar
eldar / trim_trim.py
Last active June 2, 2022 19:39
The script parses a LaTeX file, finds the \includegraphics commands that use trim, and crops the images accordingly, producing a new LaTeX document without trims.
from pathlib import Path
import re
import argparse
import imageio
import numpy as np
from tqdm import tqdm
def crop_or_pad(img, crop):
from typing import Tuple
import torch
from pytorch3d.common.linear_with_repeat import LinearWithRepeat
from pytorch3d.renderer import HarmonicEmbedding, ray_bundle_to_ray_points, RayBundle
def _xavier_init(linear):
"""
Performs the Xavier weight initialization of the linear layer `linear`.
import math
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import collections
from matplotlib import colors as mcolors
# F: 3x3 fundamental matrix maps points in image I1 to epipolar lines in image I2
# F = (K2^T)^-1 * E * K1^-1
# E = R [t]_x
# NOTE(review): fragment captured from a scraped gist listing — original
# indentation was lost, and add_argument() is truncated mid-body below.
# Purpose (from what is visible): mirrors an argparse-style interface and
# writes configuration text to a file instead — presumably converting
# add_argument() calls into a config format; verify against the full gist.
class ConfConverter:
def __init__(self, filename):
self.filename = filename
# File handle opened for writing and held for the object's lifetime.
self.f = open(filename, "w")
def __del__(self):
# Relies on GC-time finalization to close the file — fragile in general
# (a context manager or explicit close() is safer); left as-is since the
# rest of the class is not visible in this capture.
self.f.close()
# WARNING: mutable default `choices=[]` is shared across all calls (classic
# Python anti-pattern); `choices=None` would be safer. Signature mirrors
# argparse.ArgumentParser.add_argument. Body continues beyond this capture.
def add_argument(self, name, type=None, default=None, required=None, choices=[], help="", nargs=None, action=None):
f = self.f
@eldar
eldar / tf-resnet-fcn.py
Last active September 11, 2017 06:20
import datetime as dt
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.contrib.slim.nets import resnet_v1
import threading
from PoseDataset import PoseDataset
from TrainParams import TrainParams
@eldar
eldar / ldc_jit_api.d
Created April 3, 2012 18:20
API for LDC JIT
import ldc.JIT;
void main()
{
Compiler compiler = new Compiler();
compiler.importPaths = [ "/home/eldar/ldc/druntime/import",
"/home/eldar/mylib/" ];
compiler.emitModuleInfo = false;
Module mod = compiler.compile("extern(C) int helloWorld() { return 15; }");