Davis Bennett d-v-b
  • HHMI / Janelia
  • Ashburn, VA, USA
d-v-b / zarr_pydantic_test.py
Created May 30, 2023 15:33
Integrating pydantic with zarr for typed zarr hierarchies.
from __future__ import annotations
from typing import (
    Any,
    Dict, Generic, Iterable, Literal, TypeVar,
    TypedDict, Union, Protocol, runtime_checkable)
from pydantic import ValidationError
from pydantic.generics import GenericModel
from zarr.storage import init_group, BaseStore
import zarr
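
The preview above stops at the imports. A minimal sketch of the idea, assuming a hypothetical attribute schema and a plain BaseModel rather than the gist's GenericModel:

# Sketch (assumed, not the gist's code): validate a zarr group's attributes
# against a pydantic model so the hierarchy carries typed metadata.
from typing import Dict
from pydantic import BaseModel, ValidationError
import zarr

class ScaleAttrs(BaseModel):
    """Hypothetical attribute schema for an imaging dataset."""
    resolution: Dict[str, float]
    unit: str

# Create an in-memory group and give it attributes that match the schema.
group = zarr.group()
group.attrs.update({"resolution": {"x": 4.0, "y": 4.0, "z": 4.0}, "unit": "nm"})

# Validation succeeds here; malformed attributes would raise ValidationError.
try:
    print(ScaleAttrs(**dict(group.attrs)))
except ValidationError as err:
    print(err)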
d-v-b / napari_cosem.py
import zarr
import napari
import click
# Challenge napari with some big data.
# Compare with how neuroglancer handles the same dataset:
# http://neuroglancer-demo.appspot.com/#!%7B%22dimensions%22:%7B%22x%22:%5B4e-9%2C%22m%22%5D%2C%22y%22:%5B4e-9%2C%22m%22%5D%2C%22z%22:%5B4e-9%2C%22m%22%5D%7D%2C%22position%22:%5B5877.4033203125%2C7050.5%2C5252.34033203125%5D%2C%22crossSectionScale%22:36.598234443678%2C%22projectionScale%22:16384%2C%22layers%22:%5B%7B%22type%22:%22image%22%2C%22source%22:%22n5://https://janelia-cosem-datasets.s3.amazonaws.com/jrc_fly-fsb-1/jrc_fly-fsb-1.n5/em/fibsem-uint16%22%2C%22tab%22:%22rendering%22%2C%22shaderControls%22:%7B%22normalized%22:%7B%22range%22:%5B1773%2C4458%5D%7D%7D%2C%22name%22:%22fibsem-uint16%22%7D%5D%2C%22selectedLayer%22:%7B%22visible%22:true%2C%22layer%22:%22fibsem-uint16%22%7D%2C%22layout%22:%224panel%22%7D
containerPath = 's3://janelia-cosem-datasets/jrc_fly-fsb-1/jrc_fly-fsb-1.n5'
groupPath = 'em/fibsem-uint16'
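
A hedged sketch of how the script might continue from these paths; the pyramid level names ('s0', 's1', ...), anonymous S3 access, and the dask/view_image calls are assumptions, not the gist's code:

# Sketch: open the multiscale N5 group lazily and hand the pyramid to napari.
import dask.array as da
from zarr.n5 import N5FSStore

store = N5FSStore(containerPath, anon=True)  # anonymous, read-only S3 access
group = zarr.open(store, mode='r')[groupPath]
# Assume the group holds pyramid levels named s0, s1, ... (hypothetical).
pyramid = [da.from_zarr(group[f's{level}']) for level in range(5)]

# Contrast limits taken from the neuroglancer link in the comment above.
viewer = napari.view_image(pyramid, multiscale=True, contrast_limits=(1773, 4458))
napari.run()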
d-v-b / multiscale_save_test.py
Created July 21, 2021 17:49
Example of "saving" an increasingly downsampled dask array chunk-by-chunk.
import dask.array as da
from dask import delayed
import time
import numpy as np
import distributed
from distributed import LocalCluster, Client, performance_report
from datetime import datetime
def blocks(self, index, key_array):
"""
d-v-b / gist:df6661ba007d2ee75a9a24bf5406c3d7
http://neuroglancer-demo.appspot.com/#!%7B%22dimensions%22:%7B%22x%22:%5B1e-9%2C%22m%22%5D%2C%22y%22:%5B1e-9%2C%22m%22%5D%2C%22z%22:%5B1e-9%2C%22m%22%5D%7D%2C%22position%22:%5B24000.5%2C3200.5%2C16684.5%5D%2C%22crossSectionScale%22:50%2C%22projectionScale%22:65536%2C%22layers%22:%5B%7B%22type%22:%22segmentation%22%2C%22source%22:%5B%22n5://https://janelia-cosem.s3.amazonaws.com/jrc_hela-2/jrc_hela-2.n5/labels/endo_seg%22%2C%22precomputed://https://janelia-cosem.s3.amazonaws.com/jrc_hela-2/neuroglancer/mesh/endo_seg%22%5D%2C%22tab%22:%22source%22%2C%22segments%22:%5B%221%22%2C%2210%22%2C%22100%22%2C%221000%22%2C%221001%22%2C%221002%22%2C%221003%22%2C%221004%22%2C%221005%22%2C%221006%22%2C%221007%22%2C%221008%22%2C%221009%22%2C%22101%22%2C%221010%22%2C%221011%22%2C%221012%22%2C%221013%22%2C%221014%22%2C%221015%22%2C%221016%22%2C%221017%22%2C%221018%22%2C%221019%22%2C%22102%22%2C%221020%22%2C%221021%22%2C%221022%22%2C%221023%22%2C%221024%22%2C%221025%22%2C%221026%22%2C%221027%22%2C%221028%22%2C%221029%22%2C%2210
d-v-b / forCaroline.py
from scipy.ndimage.filters import gaussian_filter
import numpy as np

def generate_mask(array, sigma, percentile, percentile_downsampling=100, threshold=.4):
    # Smooth the input, then threshold it at a level interpolated between a low
    # percentile of the smoothed values and their mean.
    smoothed = gaussian_filter(array, sigma=sigma, mode='nearest')
    # Estimate the lower bound from a strided subsample to keep the percentile cheap.
    lower = np.percentile(smoothed.ravel()[::percentile_downsampling], percentile)
    mean = smoothed.mean()
    level = lower + (threshold * (mean - lower))
    mask = (smoothed > level).astype(array.dtype)
    return mask
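
A small usage example, not part of the gist; the array and parameter values are illustrative assumptions:

# Usage example (assumed parameters) on a toy volume.
if __name__ == '__main__':
    example = np.random.default_rng(0).normal(size=(64, 64, 64)).astype('float32')
    mask = generate_mask(example, sigma=2, percentile=5)
    print(mask.mean())  # fraction of voxels above the computed level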
d-v-b / napari_big_data_test.py
import numpy as np
import dask.array as da
import time
import napari

scales = [1, 2, 4, 8, 16]
chunks = (128,) * 3
full_size = (8192, 8192, 8192)
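
A hedged sketch of how the test might continue from the variables above; the use of da.random and view_image is an assumption, not the gist's code:

# Build a lazy pyramid: one random dask array per scale factor.
pyramid = [
    da.random.random([s // scale for s in full_size], chunks=chunks)
    for scale in scales
]
viewer = napari.view_image(pyramid, multiscale=True, contrast_limits=(0, 1))
napari.run()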
d-v-b / n5-view.cmd
java -Xmx31g -XX:+UseConcMarkSweepGC -cp C:\Users\parkw\.m2\repository\org\janelia\saalfeldlab\n5-utils\0.0.5-SNAPSHOT\n5-utils-0.0.5-SNAPSHOT.jar;C:\Users\parkw\.m2\repository\net\imglib2\imglib2\5.8.0\imglib2-5.8.0.jar;^
C:\Users\parkw\.m2\repository\org\janelia\saalfeldlab\n5\2.1.4\n5-2.1.4.jar;^
C:\Users\parkw\.m2\repository\org\tukaani\xz\1.8\xz-1.8.jar;^
C:\Users\parkw\.m2\repository\org\lz4\lz4-java\1.5.0\lz4-java-1.5.0.jar;^
C:\Users\parkw\.m2\repository\com\google\code\gson\gson\2.8.5\gson-2.8.5.jar;^
C:\Users\parkw\.m2\repository\org\scijava\scijava-common\2.82.0\scijava-common-2.82.0.jar;^
C:\Users\parkw\.m2\repository\org\scijava\parsington\1.0.4\parsington-1.0.4.jar;^
C:\Users\parkw\.m2\repository\org\bushe\eventbus\1.4\eventbus-1.4.jar;^
C:\Users\parkw\.m2\repository\org\apache\commons\commons-compress\1.18\commons-compress-1.18.jar;^
C:\Users\parkw\.m2\repository\org\janelia\saalfeldlab\n5-imglib2\3.4.1\n5-imglib2-3.4.1.jar;^
d-v-b / cosem-introduction.md

Introduction to COSEM

The COSEM (Cellular Organelle Segmentation in Electron Microscopy) project team at Janelia Research Campus uses computer vision techniques to detect subcellular structures, at scale, in datasets generated with next-generation volumetric electron microscopy.

COSEM processes datasets acquired by members of the Hess lab at Janelia Research Campus using Focused Ion Beam Scanning Electron Microscopy (FIB-SEM). FIB-SEM is an electron microscopy technique that enables volumetric imaging of single cells and/or bulk tissue at nanometer isotropic resolution. For more information about FIB-SEM and its applications, see the publications by Xu et al. and Hoffman et al.

FIB-SEM datasets are large (hundreds of gigabytes), dense, and extremely detailed, which poses a challenge for image pr