SeanYu (vashineyu) · Taipei, Taiwan
def gradcam_plus(model, im, class_select, layer, image_size, preproc_fn, alpha=0.6, filter_threshold=0.5):
    """GradCAM method for visualizing input saliency.

    Args:
        model: keras model
        im: single image (RGB only, shape [H, W, C])
        class_select: class index to visualize
        layer: name of the layer to compute gradients against
        image_size: tuple of image (H, W)
        preproc_fn: preprocessing function applied before the forward pass
        alpha: blending weight for the heatmap overlay
        filter_threshold: threshold below which heatmap values are filtered out
    """
# CAM
import cv2
from PIL import Image
import tensorflow as tf
from tensorflow.python.keras import backend as K
def grad_cam_keras(model, im, class_select, layer, image_size, preproc_fn, alpha=0.6, filter_threshold=0.5):
    """GradCAM method for visualizing input saliency.

    Args:
        model: keras model
        (remaining arguments as in gradcam_plus above)
    """
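A minimal sketch of the classic Grad-CAM computation such a function typically performs, written against the TF1-style Keras backend imported above (graph mode). The thresholding, colormap, and overlay details are assumptions, and grad_cam_sketch is a hypothetical name rather than the gist's own implementation.

import numpy as np
import cv2
from tensorflow.python.keras import backend as K

def grad_cam_sketch(model, im, class_select, layer, image_size, preproc_fn, alpha=0.6, filter_threshold=0.5):
    # Gradient of the selected class score w.r.t. the chosen conv layer's activations.
    x = preproc_fn(np.expand_dims(im.astype("float32"), axis=0))
    class_score = model.output[:, class_select]
    conv_output = model.get_layer(layer).output
    grads = K.gradients(class_score, conv_output)[0]
    fetch = K.function([model.input], [conv_output, grads])

    conv_val, grads_val = fetch([x])
    conv_val, grads_val = conv_val[0], grads_val[0]

    # Channel weights = global-average-pooled gradients; their weighted sum gives the raw CAM.
    weights = grads_val.mean(axis=(0, 1))
    cam = np.maximum(np.dot(conv_val, weights), 0)  # ReLU keeps positive evidence only

    # Normalize, filter weak activations, resize to the image size, and overlay.
    cam = cam / (cam.max() + 1e-8)
    cam[cam < filter_threshold] = 0.0
    cam = cv2.resize(cam.astype("float32"), image_size[::-1])  # cv2.resize expects (W, H)
    heatmap = cv2.applyColorMap(np.uint8(255 * cam), cv2.COLORMAP_JET)
    return np.uint8(alpha * heatmap + (1 - alpha) * im)  # assumes im already matches image_size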
import sys
import json
from config_initalize import get_cfg_defaults
cfg = get_cfg_defaults()
sys.path.append(cfg.LAB_TOOLS)
print(cfg)
from organize_data import initalize_folders, search_raw_files, Link_origin_to_soft, merge_dicts
initalize_folders(cfg)
raw_files = []
import glob
import os
import shutil
def initalize_folders(cfg):
    """Initialize the organized folder layout.

    Args:
        cfg (object): configuration object
    """
class GetDataset():
    """Dataset object for inference; yields a single patch per call to next().

    Args:
        slide_name (str): full path to the slide
        f_inputs_preproc (function): preprocessing function applied to each patch array
        patch_size (int): patch size in pixels
        stride (int): stride between consecutive patches
        level (int): slide pyramid level to read patches from
    """
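A minimal sketch of how a dataset like this could walk a slide with openslide: build a regular coordinate grid from patch_size and stride at the requested level and return one preprocessed patch per next() call. The grid construction, the (patch, coordinate) return format, and the name GetDatasetSketch are assumptions.

import numpy as np
import openslide

class GetDatasetSketch:
    def __init__(self, slide_name, f_inputs_preproc, patch_size=256, stride=256, level=0):
        self.slide = openslide.OpenSlide(slide_name)
        self.preproc = f_inputs_preproc
        self.patch_size = patch_size
        self.level = level
        w, h = self.slide.level_dimensions[level]
        # Regular grid of top-left coordinates covering the slide at this level.
        self.coords = [(x, y) for y in range(0, h - patch_size + 1, stride)
                               for x in range(0, w - patch_size + 1, stride)]
        self._i = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self._i >= len(self.coords):
            raise StopIteration
        x, y = self.coords[self._i]
        self._i += 1
        # read_region expects level-0 coordinates; scale the grid accordingly (approximate for non-integer downsamples).
        scale = int(round(self.slide.level_downsamples[self.level]))
        patch = self.slide.read_region((x * scale, y * scale), self.level,
                                       (self.patch_size, self.patch_size))
        arr = np.array(patch.convert("RGB"))
        return self.preproc(arr), (x, y)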
def get_large_image(slide_handler, dpl_list, sz=256):
    """Given a partial coordinate list, read the covered region at once and return it as one large image.

    This is a modification of the previous version, which generated patches from the
    slide dynamically and predicted on them one by one. That approach interacted with
    the openslide object too frequently and caused core dumps. In this version we read
    the slide region as a whole and then split it into patches with ordinary array
    operations.

    Args:
        - slide_handler: openslide object (SLIDE_OPENER object)
        - dpl_list: list of coordinate tuples
        - sz: patch size
    Return:
        - large image: numpy array of a partial WSI at high resolution (roughly 1/n_gpus of the patches of a WSI)
    """
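A minimal sketch of the approach the docstring describes: compute the bounding box of the requested coordinates, read that region from the slide once, and let the caller split it into sz-sized patches with plain numpy slicing. The bounding-box logic and the fixed level-0 read are assumptions.

import numpy as np

def get_large_image_sketch(slide_handler, dpl_list, sz=256):
    # Bounding box (level-0 coordinates) that covers every requested patch.
    xs = [x for x, _ in dpl_list]
    ys = [y for _, y in dpl_list]
    x0, y0 = min(xs), min(ys)
    width, height = max(xs) - x0 + sz, max(ys) - y0 + sz

    # One read_region call for the whole block instead of one call per patch.
    region = slide_handler.read_region((x0, y0), 0, (width, height))
    large_image = np.array(region.convert("RGB"))

    # Callers can now slice patches out of memory without touching openslide again:
    #   patch = large_image[y - y0:y - y0 + sz, x - x0:x - x0 + sz]
    return large_image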
"""Data generator for model
GetDataset: Get single data with next
Customized dataloader: Compose multiple dataset object together and put them into multi-processing flow
ver1. all patches were taken from single slide util N patches have been taken.
"""
import cv2
import os
import json
# model.py
import tensorflow as tf
import tensorflow.keras.layers as layers
import tensorflow.keras.models as models
import tensorflow.nn as F
class Conv_bn_relu(models.Model):
    """Stack of Conv2D -> BatchNormalization -> ReLU blocks."""