Takes a Bio-Formats compatible file and batch exports individual series, optionally Z-projected and rescaled. #BIOP #Fiji #BigDataViewer
// You need to activate https://biop.epfl.ch/Fiji-Bdv-Playground/ for this script to function
// It takes a file and batch exports individual series, optionally Z-projected and rescaled
// Handles big datasets correctly and saves multiresolution OME-TIFF files, keeping the positional metadata
// Nicolas Chiaruttini, BIOP, EPFL, 2022
#@File(label="Dataset") image_file
#@String(label="Give a name to your dataset") dataset_name
#@String( label = "Select a subset of channels (0 based, comma separated). Leave blank for all", value="") range_channels
#@String( label = "Selected slices (0 based, comma separated). Leave blank for all", required = false ) range_slices
#@String( label = "Selected frames (0 based, comma separated). Leave blank for all", required = false ) range_frames
#@Boolean( label = "Perform Z Projection") z_project
#@String(label="Z Projection Method", choices={"Average Intensity", "Max Intensity", "Min Intensity", "Sum Slices", "Standard Deviation", "Median"}) z_project_method
#@Integer(label="Resize in (XY)", value=1) resize
#@File(label="Output directory", style="directory") output_directory
#@Boolean(label = "Show Images", value = false) show_images
#@Integer(label="Number of images processed in parallel", value=2) n_max_threads
#@Integer(label="Number of resolution levels", value=4) nResolutionLevels
#@Integer(label="Downscaling factor", value=2) downscaleFactor
#@String(label="Compression", choices={"LZW","Uncompressed"}) compression

#@SourceAndConverterService sac_service
#@CommandService command
#@ConvertService convert
#@TaskService taskService
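// The #@ lines above are SciJava script parameters and injected services; Fiji shows a dialog for them when the script runs.
// Note: nResolutionLevels and downscaleFactor are collected here but are not forwarded to the OME-TIFF writer call further down in this version of the script.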
def task = taskService.createTask("OME TIFF conversion "+image_file.getName())
task.setStatusMessage("Parsing file metadata...")
sac_service.remove(sac_service.getSourceAndConverters().toArray(new SourceAndConverter[0]))
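// Clearing the SourceAndConverterService first avoids mixing this dataset with sources left over from previous runs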
command.run(CreateBdvDatasetBioFormatsCommand.class, true,
        "datasetname", dataset_name,
        "unit", "MICROMETER",
        "files", [image_file] as File[],
        "split_rgb_channels", false,
        "auto_pyramidize", false,
        "plane_origin_convention", "TOP LEFT",
        "disable_memo", "false").get()
def allSources = convert.convert(dataset_name, SourceAndConverter[].class)
IJ.log("Total number of sources="+allSources.length)
/**
 * Remove all z offsets from the sources
 */
for (source: allSources) {
    center = SourceAndConverterHelper.getSourceAndConverterCenterPoint(source,0)
    AffineTransform3D zOffset = new AffineTransform3D()
    zOffset.translate(0,0,center.getDoublePosition(2))
    SourceTransformHelper.append(zOffset.inverse(), new SourceAndConverterAndTimeRange(source,0))
}
// Uses the Bdv UI to fetch the number of series (how many nodes are there below dataset>SeriesIndex?)
def datasetPath = sac_service.getUI().getTreePathFromString(dataset_name+">SeriesIndex")
int number_of_series = datasetPath.getLastPathComponent().getChildCount();
task.setProgressMaximum(number_of_series)
def iImage = new AtomicInteger()
def start = Instant.now();
task.setStatusMessage("Conversion in progress")
def lock = new Object()
GParsExecutorsPool.withPool(n_max_threads as int) {
    (0..number_of_series-1).eachParallel{ series_index ->
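        // Each series is exported independently; at most n_max_threads of them are processed at the same time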
        // Converts the series to a virtual ImagePlus
        def ij1_images = command.run(ExportToMultipleImagePlusCommand.class, false,
                "sacs", dataset_name+">Tile>"+series_index,
                "level", 0,
                "range_frames", range_frames,
                "range_channels", range_channels,
                "range_slices", range_slices,
                "export_mode", "Virtual no-cache", // Because we only read once!
                "parallel", Boolean.TRUE,
                "verbose", Boolean.TRUE
                ).get().getOutput("imps_out")
        if (ij1_images.size()!=1) IJ.log("ERROR: ONE IMAGE EXPECTED, MULTIPLE ONES FOUND")
        def image = ij1_images.get(0)
        IJ.log("Processing "+image.getTitle())
        // Z Project
        if (z_project) {
            def iniTitle = image.getTitle();
            def cal = image.getCalibration().copy();
            ZProjector zp = new ZProjector();
            zp.setImage(image);
            zp.setMethod(Arrays.asList(ZProjector.METHODS).indexOf(z_project_method));
            zp.setStopSlice(image.getNSlices());
            if (image.getNSlices() > 1 || image.getNFrames() > 1) {
                zp.doHyperStackProjection(true);
            }
            def initialBitDepth = image.getBitDepth()
            image = zp.getProjection();
            image.setTitle(iniTitle+"_ZProj_"+z_project_method)
            cal.zOrigin = 0 // remove z offset when projecting
            image.setCalibration(cal)
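            // Some projection methods (e.g. "Sum Slices", "Standard Deviation") return 32-bit images;
            // the conversion below restores the original bit depth without rescaling, so values outside that range are clipped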
            // Restores bit depth
            if (image.getBitDepth() != initialBitDepth) {
                def ic = new ImageConverter(image)
                ic.setDoScaling(false)
                switch (initialBitDepth) {
                    case 8:
                        ic.convertToGray8()
                        break
                    case 16:
                        ic.convertToGray16()
                        break
                    default:
                        IJ.log("Conversion from "+image.getBitDepth()+" to "+initialBitDepth+" unsupported")
                }
            }
        }
        // Resizes the image if set by the user
        if (resize!=1) {
            def iniTitle = image.getTitle();
            def cal = image.getCalibration().copy()
            cal.pixelWidth = cal.pixelWidth * resize as double
            cal.pixelHeight = cal.pixelHeight * resize as double
            cal.xOrigin = cal.xOrigin / resize as double
            cal.yOrigin = cal.yOrigin / resize as double
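            // Scaler.resize interpolates in XY only (the slice count is unchanged); the calibration adjusted above
            // keeps the physical dimensions of the image consistent with the downscaled pixel grid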
            image = Scaler.resize(
                    image, image.getWidth()/resize as int,
                    image.getHeight()/resize as int,
                    image.getNSlices() as int,
                    "bilinear")
            image.setTitle(iniTitle+"_RescaledXY_"+resize)
            image.setCalibration(cal)
        }
        if (show_images) { image.show() }
        def totalPath = output_directory.getAbsolutePath()+File.separator+image_file.getName()+"_"+image.getTitle()+".ome.tiff"
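        // The OME-TIFF is written in a separate thread so this pool thread can immediately start exporting the next series;
        // progress reporting is synchronized on 'lock' so the counter, the log and the task update stay consistent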
        new Thread(() -> {
            ImagePlusToOMETiff.writeToOMETiff(image, new File(totalPath), compression, taskService);
            synchronized (lock) {
                printTimingMessage(start, ((double)(iImage.incrementAndGet())/(double)(number_of_series))*100)
                long currentProgress = iImage.get();
                task.setProgressValue(currentProgress);
                if (currentProgress==number_of_series) {
                    task.run(() -> {}) // finished task
                }
            }
        }).start();
    }
}
return "Done" | |
void printTimingMessage(Instant start, double percentageCompleteness) {
    long s = Duration.between(start, Instant.now()).getSeconds();
    String elapsedTime = String.format("%d:%02d:%02d", (s / 3600) as int, ((s % 3600) / 60) as int, (s % 60) as int);
    double sPerPC = s / percentageCompleteness;
    long sRemaining = (long) ((100 - percentageCompleteness) * sPerPC);
    String remainingTime = String.format("%d:%02d:%02d", (sRemaining / 3600) as int, ((sRemaining % 3600) / 60) as int, (sRemaining % 60) as int);
    LocalDateTime estimateDoneJob = LocalDateTime.now().plus(Duration.ofSeconds(sRemaining));
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("HH:mm");
    long nDays = sRemaining / (3600 * 24);
    String daysMessage = "";
    if (nDays == 1) {
        daysMessage += " tomorrow.";
    }
    if (nDays > 1) {
        daysMessage += " in " + nDays + " days.";
    }
    String formatDateTime = estimateDoneJob.format(formatter)
    if ((percentageCompleteness as int) == 100) {
        String message = ' - Task completed. Elapsed time: ' + elapsedTime + '.'
        IJ.log(message);
    } else {
        String message = ' - Task ' + (percentageCompleteness as int) + ' % completed. Elapsed time: ' + elapsedTime + '. Estimated remaining time: ' + remainingTime + '. Job done at around ' + formatDateTime + daysMessage
        IJ.log(message);
    }
}
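// Imports are grouped at the end of the script; Groovy accepts import statements anywhere in a script,
// which keeps the SciJava #@ parameter declarations at the top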
import ch.epfl.biop.scijava.command.source.ExportToMultipleImagePlusCommand
import ch.epfl.biop.bdv.img.bioformats.command.CreateBdvDatasetBioFormatsCommand
import sc.fiji.bdvpg.scijava.command.source.BasicTransformerCommand
import bdv.viewer.SourceAndConverter
import sc.fiji.bdvpg.sourceandconverter.SourceAndConverterHelper
import sc.fiji.bdvpg.sourceandconverter.transform.SourceTransformHelper
import sc.fiji.bdvpg.sourceandconverter.SourceAndConverterAndTimeRange
import net.imglib2.realtransform.AffineTransform3D
import ij.plugin.ZProjector
import ij.plugin.Scaler
import groovyx.gpars.GParsPool.*
import groovyx.gpars.*
import java.util.concurrent.atomic.AtomicInteger
import ij.IJ
import org.apache.commons.io.FilenameUtils
import java.time.Duration
import java.time.Instant
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import ch.epfl.biop.ImagePlusToOMETiff
import ij.process.ImageConverter