Davis Bennett (d-v-b)
Independent software engineer, Würzburg, Germany
d-v-b / dacapo_demo_data.py
Last active March 4, 2024 15:28
copy demo data from s3 for dacapo
from __future__ import annotations
from concurrent.futures import ThreadPoolExecutor, as_completed, wait
import os
import time
from typing import Any, Dict, List
import click
import numpy as np
import zarr
from zarr.storage import FSStore
import logging
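
The preview stops after the imports. Below is a minimal sketch of the copying step the description names, assuming zarr-python 2.x semantics where stores are mutable mappings of chunk keys, so a parallel copy can simply mirror keys; the URL, destination path, and worker count are illustrative placeholders, not values from the gist.

def copy_key(source_store, dest_store, key: str) -> str:
    # Stores behave as MutableMappings, so assigning a key copies one
    # chunk (or metadata document) verbatim.
    dest_store[key] = source_store[key]
    return key

def copy_demo_data(s3_url: str, local_path: str, num_workers: int = 8) -> None:
    # Hypothetical entry point: mirror every key from S3 into a local store.
    source = FSStore(s3_url, mode='r')
    dest = zarr.DirectoryStore(local_path)
    with ThreadPoolExecutor(max_workers=num_workers) as pool:
        futures = [pool.submit(copy_key, source, dest, key) for key in source]
        for fut in as_completed(futures):
            logging.info('copied %s', fut.result())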
"""
Demo of TypedDict for payloads for a method on a dataclass that returns a new instance with updated attributes.
"""
from dataclasses import dataclass
from typing import TypedDict
class FooUpdate(TypedDict):
    prop_a: int
    prop_b: str
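
The preview stops at the TypedDict. A sketch of the rest of the pattern the docstring describes, using dataclasses.replace to build the new instance; the Foo class and the example values are illustrative.

from dataclasses import replace

@dataclass(frozen=True)
class Foo:
    prop_a: int
    prop_b: str

    def update(self, payload: FooUpdate) -> 'Foo':
        # replace() returns a new instance with the payload's fields swapped in.
        # A total=False TypedDict would additionally allow partial payloads.
        return replace(self, **payload)

foo = Foo(prop_a=1, prop_b='a')
assert foo.update({'prop_a': 2, 'prop_b': 'b'}) == Foo(prop_a=2, prop_b='b')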
d-v-b / create_dataarray.py
Created November 7, 2023 21:09
crappy ome-ngff to xarray
from xarray_ome_ngff.registry import get_adapters
import zarr
from typing import Union
import dask.array as da
from xarray import DataArray
import os
def infer_coords(group: zarr.Group, array: zarr.Array):
    # these conditionals should be handled by a lower-level validation function
    if 'multiscales' in group.attrs:
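
The preview ends inside infer_coords. A sketch of what deriving coordinates from 'multiscales' metadata can look like, assuming OME-NGFF 0.4 (named axes plus per-dataset scale/translation transforms); this helper is illustrative and is not the gist's actual implementation.

import numpy as np

def coords_from_multiscales(group: zarr.Group, array: zarr.Array) -> dict:
    # Assumes OME-NGFF 0.4 metadata on the group.
    multi = group.attrs['multiscales'][0]
    axes = [ax['name'] for ax in multi['axes']]
    dataset = next(d for d in multi['datasets'] if d['path'] == array.basename)
    scale = [1.0] * array.ndim
    translation = [0.0] * array.ndim
    for tx in dataset.get('coordinateTransformations', []):
        if tx['type'] == 'scale':
            scale = tx['scale']
        elif tx['type'] == 'translation':
            translation = tx['translation']
    # One coordinate array per axis: index * scale + translation.
    return {
        name: np.arange(size) * s + t
        for name, size, s, t in zip(axes, array.shape, scale, translation)
    }

The result can then be passed as the coords argument when wrapping da.from_zarr(array) in a DataArray.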
d-v-b / zarr_pydantic_test.py
Created May 30, 2023 15:33
Integrating pydantic with zarr for typed zarr hierarchies.
from __future__ import annotations
from typing import (
    Any,
    Dict, Generic, Iterable, Literal, TypeVar,
    TypedDict, Union, Protocol, runtime_checkable)
from pydantic import ValidationError
from pydantic.generics import GenericModel
from zarr.storage import init_group, BaseStore
import zarr
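
The preview stops at the imports. A minimal sketch of one way pydantic and zarr can meet, validating a live array against a declared spec with a plain BaseModel; the gist's GenericModel-based design is not reproduced here, and ArraySpec and its fields are illustrative.

from pydantic import BaseModel

class ArraySpec(BaseModel):
    shape: tuple
    dtype: str
    attributes: dict

    @classmethod
    def from_zarr(cls, array: zarr.Array) -> 'ArraySpec':
        # Capture the structural metadata of an existing array.
        return cls(shape=tuple(array.shape), dtype=str(array.dtype),
                   attributes=dict(array.attrs))

    def like(self, array: zarr.Array) -> bool:
        # Pydantic models compare field-by-field, so this checks the
        # live array against the declared spec.
        return self == type(self).from_zarr(array)

spec = ArraySpec(shape=(10, 10), dtype='uint8', attributes={})
assert spec.like(zarr.zeros((10, 10), dtype='uint8'))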
import zarr
import napari
import click
# Challenge napari with some big data.
# Compare with how neuroglancer handles the same dataset:
# http://neuroglancer-demo.appspot.com/#!%7B%22dimensions%22:%7B%22x%22:%5B4e-9%2C%22m%22%5D%2C%22y%22:%5B4e-9%2C%22m%22%5D%2C%22z%22:%5B4e-9%2C%22m%22%5D%7D%2C%22position%22:%5B5877.4033203125%2C7050.5%2C5252.34033203125%5D%2C%22crossSectionScale%22:36.598234443678%2C%22projectionScale%22:16384%2C%22layers%22:%5B%7B%22type%22:%22image%22%2C%22source%22:%22n5://https://janelia-cosem-datasets.s3.amazonaws.com/jrc_fly-fsb-1/jrc_fly-fsb-1.n5/em/fibsem-uint16%22%2C%22tab%22:%22rendering%22%2C%22shaderControls%22:%7B%22normalized%22:%7B%22range%22:%5B1773%2C4458%5D%7D%7D%2C%22name%22:%22fibsem-uint16%22%7D%5D%2C%22selectedLayer%22:%7B%22visible%22:true%2C%22layer%22:%22fibsem-uint16%22%7D%2C%22layout%22:%224panel%22%7D
containerPath = 's3://janelia-cosem-datasets/jrc_fly-fsb-1/jrc_fly-fsb-1.n5'
groupPath = 'em/fibsem-uint16'
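
A sketch of how the challenge can be wired up, assuming zarr-python 2.x's N5 support and lazy loading through dask so that napari only fetches the chunks it actually displays; the scale-level keys 's0'..'s3' follow the COSEM layout and are an assumption here.

import dask.array as da
from zarr.n5 import N5FSStore

# Anonymous access is assumed for the public janelia-cosem-datasets bucket.
store = N5FSStore(containerPath, anon=True)
group = zarr.open_group(store, mode='r', path=groupPath)
# One lazy dask array per scale level, finest first.
pyramid = [da.from_zarr(group[key]) for key in ('s0', 's1', 's2', 's3')]
# Contrast limits mirror the neuroglancer link above.
napari.view_image(pyramid, multiscale=True, contrast_limits=(1773, 4458))
napari.run()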
d-v-b / multiscale_save_test.py
Created July 21, 2021 17:49
Example of "saving" an increasingly downsampled dask array chunk-by-chunk.
import dask.array as da
from dask import delayed
import time
import numpy as np
import distributed
from distributed import LocalCluster, Client, performance_report
from datetime import datetime
def blocks(self, index, key_array):
    """
http://neuroglancer-demo.appspot.com/#!%7B%22dimensions%22:%7B%22x%22:%5B1e-9%2C%22m%22%5D%2C%22y%22:%5B1e-9%2C%22m%22%5D%2C%22z%22:%5B1e-9%2C%22m%22%5D%7D%2C%22position%22:%5B24000.5%2C3200.5%2C16684.5%5D%2C%22crossSectionScale%22:50%2C%22projectionScale%22:65536%2C%22layers%22:%5B%7B%22type%22:%22segmentation%22%2C%22source%22:%5B%22n5://https://janelia-cosem.s3.amazonaws.com/jrc_hela-2/jrc_hela-2.n5/labels/endo_seg%22%2C%22precomputed://https://janelia-cosem.s3.amazonaws.com/jrc_hela-2/neuroglancer/mesh/endo_seg%22%5D%2C%22tab%22:%22source%22%2C%22segments%22:%5B%221%22%2C%2210%22%2C%22100%22%2C%221000%22%2C%221001%22%2C%221002%22%2C%221003%22%2C%221004%22%2C%221005%22%2C%221006%22%2C%221007%22%2C%221008%22%2C%221009%22%2C%22101%22%2C%221010%22%2C%221011%22%2C%221012%22%2C%221013%22%2C%221014%22%2C%221015%22%2C%221016%22%2C%221017%22%2C%221018%22%2C%221019%22%2C%22102%22%2C%221020%22%2C%221021%22%2C%221022%22%2C%221023%22%2C%221024%22%2C%221025%22%2C%221026%22%2C%221027%22%2C%221028%22%2C%221029%22%2C%2210
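
The multiscale_save_test.py preview above cuts off at the blocks helper. A sketch of the underlying idea it describes, writing each successively downsampled level chunk-by-chunk; coarsening by mean and the 's{level}' naming are assumptions, not necessarily what the gist does.

def save_multiscale(data: da.Array, dest: str, levels: int = 4) -> None:
    current = data
    for level in range(levels):
        # to_zarr materializes one task per chunk, i.e. the array is
        # written chunk-by-chunk rather than all at once.
        current.to_zarr(dest, component=f's{level}', overwrite=True)
        # Downsample 2x along every axis for the next level.
        factors = {axis: 2 for axis in range(current.ndim)}
        current = da.coarsen(np.mean, current, factors, trim_excess=True)

Run under the LocalCluster/Client from the imports above to parallelize the per-chunk writes.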
# scipy.ndimage.filters is deprecated; import from scipy.ndimage directly.
from scipy.ndimage import gaussian_filter
import numpy as np

def generate_mask(array, sigma, percentile, percentile_downsampling=100, threshold=0.4):
    # Smooth the input to suppress noise before thresholding.
    smoothed = gaussian_filter(array, sigma=sigma, mode='nearest')
    # Estimate a lower bound from a percentile of a subsampled view of the data.
    lower = np.percentile(smoothed.ravel()[::percentile_downsampling], percentile)
    mean = smoothed.mean()
    # Set the threshold a fraction of the way from the lower bound to the mean.
    level = lower + (threshold * (mean - lower))
    mask = (smoothed > level).astype(array.dtype)
    return mask
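
A quick usage sketch for generate_mask on synthetic data; the parameter values are illustrative.

rng = np.random.default_rng(0)
# A noisy bright square on a dark background.
image = np.zeros((64, 64), dtype='float32')
image[24:40, 24:40] = 1.0
image += rng.normal(scale=0.1, size=image.shape).astype('float32')
# Mask is 1 where the smoothed image clears the computed threshold.
mask = generate_mask(image, sigma=2, percentile=5)
print(mask.sum(), mask.dtype)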