changeset 1:bd5771ff6aa3 draft
planemo upload for repository https://github.com/lldelisle/tools-lldelisle/tree/master/tools/omero_hyperstack_to_gastruloid_measurements commit aec2e2ead02236551c1e9c6d333034057daf0bca
author: lldelisle
date: Wed, 20 Dec 2023 20:07:13 +0000
parents: 5396ab665901
children: 45375dc5dd8d
files: 1-omero_timelapse_image_to_measurements_phase.groovy README.md omero_hyperstack_to_gastruloid_measurements.xml
diffstat: 3 files changed, 1017 insertions(+), 309 deletions(-)
--- a/1-omero_timelapse_image_to_measurements_phase.groovy Fri Mar 24 13:03:39 2023 +0000 +++ b/1-omero_timelapse_image_to_measurements_phase.groovy Wed Dec 20 20:07:13 2023 +0000 @@ -1,10 +1,11 @@ // This macro was written by the BIOP (https://github.com/BIOP) // Romain Guiet and Rémy Dornier // Lucille Delisle modified to support headless +// And to be more robust to OMERO reboot // merge the analysis script with templates available at -// https://github.com/BIOP/OMERO-scripts/tree/main/Fiji +// https://github.com/BIOP/OMERO-scripts/tree/025047955b5c1265e1a93b259c1de4600d00f107/Fiji -// Last modification: 2023-03-24 +// Last modification: 2023-12-20 /* * = COPYRIGHT = @@ -28,20 +29,35 @@ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ -// This macro will use ilastik to detect ROIs +// This macro will use ilastik or convert to mask +// to detect ROIs // measure and compute elongation index +// It may also regenerate a ROI of background // The input image(s) may have multiple time stacks // It may have multiple channels // If multiple channels are present, the LUT will be // used to determine which channel should be used. +// The option keep_only_largest (by default to true) +// Allows to keep only the largest ROI for each stack + +// The option 'rescue' allows to only process images without ROIs and +// tables and generate the final table + +// Without this option, the job will fail if a ROI or a table exists + +// If a final table exists it will fail in both modes + +// The option use_existing allows to +// Recompute only spine + // This macro works both in headless // or GUI // In both modes, // The result table and the result ROI are sent to omero -// The measures are: Area,Perim.,Circ.,Feret,FeretX,FeretY,FeretAngle,MinFeret,AR,Round,Solidity,Unit,Date,Version,IlastikProject,ProbabilityThreshold,MinSizeParticle,MinDiameter,ClosenessTolerance,MinSimilarity,RadiusMedian,BaseImage,ROI,Time,ROI_type,LargestRadius,SpineLength,ElongationIndex +// The measures are: Area,Perim.,Circ.,Feret,FeretX,FeretY,FeretAngle,MinFeret,AR,Round,Solidity,Unit,Date,Version,IlastikProject,ProbabilityThreshold,MinSizeParticle,MinDiameter,ClosenessTolerance,MinSimilarity,RadiusMedian,BaseImage,ROI,Time,ROI_type,XCentroid,YCentroid,LargestRadius,SpineLength,ElongationIndex[,Date_rerun_spine,Version_rerun_spine] // LargestRadius and SpineLength are set to 0 if no circle was found. // ElongationIndex is set to 0 if a gastruloid was found and to -1 if no gastruloid was found. 
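// Note on the measure columns listed above: as computed further down in this
// script, ElongationIndex = SpineLength / (2 * LargestRadius), with both
// lengths expressed in the image unit. For example, a spine of 800 um and a
// largest inscribed circle of radius 100 um give an ElongationIndex of
// 800 / (2 * 100) = 4. The worked example is illustrative only.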
@@ -53,6 +69,7 @@ import fr.igred.omero.annotations.TableWrapper import fr.igred.omero.Client import fr.igred.omero.repository.DatasetWrapper +import fr.igred.omero.repository.GenericRepositoryObjectWrapper import fr.igred.omero.repository.ImageWrapper import fr.igred.omero.repository.PlateWrapper import fr.igred.omero.repository.PixelsWrapper @@ -63,13 +80,16 @@ import ij.gui.Overlay import ij.gui.PolygonRoi import ij.gui.Roi +import ij.gui.ShapeRoi import ij.IJ import ij.io.FileSaver +import ij.measure.ResultsTable import ij.plugin.Concatenator import ij.plugin.Duplicator import ij.plugin.frame.RoiManager import ij.plugin.HyperStackConverter import ij.plugin.ImageCalculator +import ij.plugin.Thresholder import ij.Prefs import ij.process.FloatPolygon import ij.process.ImageProcessor @@ -77,6 +97,8 @@ import java.awt.Color import java.awt.GraphicsEnvironment import java.io.File +import java.util.concurrent.TimeUnit +import java.util.stream.Collectors import loci.plugins.in.ImporterOptions @@ -88,7 +110,290 @@ import org.apache.commons.io.FileUtils import org.ilastik.ilastik4ij.ui.* -def processDataset(Client user_client, DatasetWrapper dataset_wpr, + +// Global variable with times in minutes to wait: +waiting_times = [0, 10, 60, 360, 600] + +def robustlyGetAll(GenericRepositoryObjectWrapper obj_wrp, String object_type, Client user_client) { + for (waiting_time in waiting_times) { + try { + wrappers = null + switch (object_type) { + case "image": + wrappers = obj_wrp.getImages(user_client) + break + case "dataset": + wrappers = obj_wrp.getDatasets(user_client) + break + case "well": + wrappers = obj_wrp.getWells(user_client) + break + case "project": + wrappers = obj_wrp.getProjects(user_client) + break + case "plate": + wrappers = obj_wrp.getPlates(user_client) + break + case "screen": + wrappers = obj_wrp.getScreens(user_client) + break + } + return wrappers + } catch(Exception e) { + println("Could not get " + object_type + " for " + obj_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyGetOne(Long id, String object_type, Client user_client) { + for (waiting_time in waiting_times) { + try { + + wrapper = null + switch (object_type) { + case "image": + warpper = user_client.getImage(id) + break + case "dataset": + warpper = user_client.getDataset(id) + break + case "well": + warpper = user_client.getWell(id) + break + case "project": + warpper = user_client.getProject(id) + break + case "plate": + warpper = user_client.getPlate(id) + break + case "screen": + warpper = user_client.getScreen(id) + break + } + return warpper + } catch(Exception e) { + println("Could not get " + object_type + " id " + id + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. 
Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyGetTables(GenericRepositoryObjectWrapper obj_wrp,Client user_client) { + for (waiting_time in waiting_times) { + try { + return obj_wrp.getTables(user_client) + } catch(Exception e) { + println("Could not get tables for " + obj_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyGetROIs(ImageWrapper image_wrp,Client user_client) { + for (waiting_time in waiting_times) { + try { + return image_wrp.getROIs(user_client) + } catch(Exception e) { + println("Could not get ROIs for " + image_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyHasAnyTable(GenericRepositoryObjectWrapper obj_wrp, String object_type, Client user_client) { + if (robustlyGetTables(obj_wrp, user_client).size() > 0) { + return true + } else { + for (image_wrp in robustlyGetAll(obj_wrp, "image", user_client)) { + if (robustlyGetTables(image_wrp, user_client).size() > 0) { + return true + } + } + } + return false +} + +def robustlyHasAnyROI(GenericRepositoryObjectWrapper obj_wrp, Client user_client) { + for (image_wrp in robustlyGetAll(obj_wrp, "image", user_client)) { + if (robustlyGetROIs(image_wrp, user_client).size() > 0) { + return true + } + } + return false +} + + +def robustlyDeleteTables(GenericRepositoryObjectWrapper obj_wrp,Client user_client) { + for (waiting_time in waiting_times) { + try { + obj_wrp.getTables(user_client).each{ + user_client.delete(it) + } + return + } catch(Exception e) { + println("Could not remove tables for " + obj_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyDeleteROIs(ImageWrapper image_wrp, Client user_client, List<ROIWrapper> rois) { + for (waiting_time in waiting_times) { + try { + // Remove existing ROIs + // image_wrp.getROIs(user_client).each{ user_client.delete(it) } + // Caused failure due to too high number of 'servantsPerSession' + // Which reached 10k + // I use see https://github.com/GReD-Clermont/simple-omero-client/issues/59 + user_client.delete((Collection<ROIWrapper>) rois) + return + } catch(Exception e) { + println("Could not remove ROIs for " + image_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. 
Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyAddAndReplaceTable(GenericRepositoryObjectWrapper obj_wrp, Client user_client, TableWrapper table) { + for (waiting_time in waiting_times) { + try { + obj_wrp.addAndReplaceTable(user_client, table) + return + } catch(Exception e) { + println("Could not add table to " + obj_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlytoImagePlus(ImageWrapper image_wrp, Client user_client) { + for (waiting_time in waiting_times) { + try { + return image_wrp.toImagePlus(user_client) + } catch(Exception e) { + println("Could not convert to image plus " + image_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlysaveROIs(ImageWrapper image_wrp, Client user_client, List<ROIWrapper> rois) { + for (waiting_time in waiting_times) { + try { + image_wrp.saveROIs(user_client, rois) + return + } catch(Exception e) { + println("Could not add ROIs to " + image_wrp + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyNewTableWrapper(Client user_client, ResultsTable results, Long imageId, List<? extends Roi> ijRois, String roiProperty) { + for (waiting_time in waiting_times) { + try { + return new TableWrapper(user_client, results, imageId, ijRois, roiProperty) + } catch(Exception e) { + println("Could not generate new table for image " + imageId + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def robustlyAddRows(TableWrapper table, Client user_client, ResultsTable results, Long imageId, List<? extends Roi> ijRois, String roiProperty) { + for (waiting_time in waiting_times) { + try { + table.addRows(user_client, results, imageId, ijRois, roiProperty) + return + } catch(Exception e) { + if (e.getClass().equals(java.lang.IllegalArgumentException)) { + throw e + } + println("Could not add rows for image " + imageId + " waiting " + waiting_time + " minutes and trying again.") + println e + TimeUnit.MINUTES.sleep(waiting_time) + last_exception = e + if (!user_client.isConnected()) { + println("Has been deconnected. 
Will reconnect.") + user_client.connect(host, port, USERNAME, PASSWORD.toCharArray()) + } + } + } + throw last_exception +} + +def processDataset(Client user_client, DatasetWrapper dataset_wrp, File ilastik_project, String ilastik_project_type, Integer ilastik_label_OI, Double probability_threshold, Double radius_median, @@ -96,20 +401,26 @@ Integer minimum_diameter, Integer closeness_tolerance, Double min_similarity, String ilastik_project_short_name, File output_directory, - Boolean headless_mode, Boolean debug, String tool_version) { - dataset_wpr.getImages(user_client).each{ ImageWrapper img_wpr -> - processImage(user_client, img_wpr, + Boolean headless_mode, Boolean debug, String tool_version, + Boolean use_existing, String final_object, Boolean rescue, + Integer ilastik_label_BG, Double probability_threshold_BG, + Boolean keep_only_largest, String segmentation_method) { + robustlyGetAll(dataset_wrp, "image", user_client).each{ ImageWrapper img_wrp -> + processImage(user_client, img_wrp, ilastik_project, ilastik_project_type, ilastik_label_OI, probability_threshold, radius_median, min_size_particle, get_spine, minimum_diameter, closeness_tolerance, min_similarity, ilastik_project_short_name, output_directory, - headless_mode, debug, tool_version) + headless_mode, debug, tool_version, + use_existing, final_object, rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) } } -def processSinglePlate(Client user_client, PlateWrapper plate_wpr, +def processSinglePlate(Client user_client, PlateWrapper plate_wrp, File ilastik_project, String ilastik_project_type, Integer ilastik_label_OI, Double probability_threshold, Double radius_median, @@ -117,20 +428,26 @@ Integer minimum_diameter, Integer closeness_tolerance, Double min_similarity, String ilastik_project_short_name, File output_directory, - Boolean headless_mode, Boolean debug, String tool_version) { - plate_wpr.getWells(user_client).each{ well_wpr -> - processSingleWell(user_client, well_wpr, + Boolean headless_mode, Boolean debug, String tool_version, Boolean use_existing, + String final_object, Boolean rescue, + Integer ilastik_label_BG, Double probability_threshold_BG, + Boolean keep_only_largest, String segmentation_method) { + robustlyGetAll(plate_wrp, "well", user_client).each{ well_wrp -> + processSingleWell(user_client, well_wrp, ilastik_project, ilastik_project_type, ilastik_label_OI, probability_threshold, radius_median, min_size_particle, get_spine, minimum_diameter, closeness_tolerance, min_similarity, ilastik_project_short_name, output_directory, - headless_mode, debug, tool_version) + headless_mode, debug, tool_version, + use_existing, final_object, rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) } } -def processSingleWell(Client user_client, WellWrapper well_wpr, +def processSingleWell(Client user_client, WellWrapper well_wrp, File ilastik_project, String ilastik_project_type, Integer ilastik_label_OI, Double probability_threshold, Double radius_median, @@ -138,8 +455,11 @@ Integer minimum_diameter, Integer closeness_tolerance, Double min_similarity, String ilastik_project_short_name, File output_directory, - Boolean headless_mode, Boolean debug, String tool_version) { - well_wpr.getWellSamples().each{ + Boolean headless_mode, Boolean debug, String tool_version, Boolean use_existing, + String final_object, Boolean rescue, + Integer ilastik_label_BG, Double probability_threshold_BG, + Boolean keep_only_largest, String 
segmentation_method) { + well_wrp.getWellSamples().each{ processImage(user_client, it.getImage(), ilastik_project, ilastik_project_type, ilastik_label_OI, probability_threshold, @@ -147,11 +467,14 @@ minimum_diameter, closeness_tolerance, min_similarity, ilastik_project_short_name, output_directory, - headless_mode, debug, tool_version) + headless_mode, debug, tool_version, + use_existing, final_object, rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) } } -def processImage(Client user_client, ImageWrapper image_wpr, +def processImage(Client user_client, ImageWrapper image_wrp, File ilastik_project, String ilastik_project_type, // String ilastik_strategy, Integer ilastik_label_OI, Double probability_threshold, Double radius_median, Integer min_size_particle, @@ -159,7 +482,10 @@ Integer minimum_diameter, Integer closeness_tolerance, Double min_similarity, String ilastik_project_short_name, File output_directory, - Boolean headless_mode, Boolean debug, String tool_version) { + Boolean headless_mode, Boolean debug, String tool_version, + Boolean use_existing, String final_object, Boolean rescue, + Integer ilastik_label_BG, Double probability_threshold_BG, + Boolean keep_only_largest, String segmentation_method) { IJ.run("Close All", "") IJ.run("Clear Results") @@ -172,29 +498,37 @@ // Print image information println "\n Image infos" - String image_basename = image_wpr.getName() - println ("Image_name : " + image_basename + " / id : " + image_wpr.getId()) - List<DatasetWrapper> dataset_wpr_list = image_wpr.getDatasets(user_client) + String image_basename = image_wrp.getName() + println ("Image_name : " + image_basename + " / id : " + image_wrp.getId()) + List<DatasetWrapper> dataset_wrp_list = robustlyGetAll(image_wrp, "dataset", user_client) // if the image is part of a dataset - if(!dataset_wpr_list.isEmpty()){ - dataset_wpr_list.each{println("dataset_name : "+it.getName()+" / id : "+it.getId())}; - image_wpr.getProjects(user_client).each{println("Project_name : "+it.getName()+" / id : "+it.getId())}; + if(!dataset_wrp_list.isEmpty()){ + dataset_wrp_list.each{ + println("dataset_name : "+it.getName()+" / id : "+it.getId()) + } + robustlyGetAll(image_wrp, "project", user_client).each{ + println("Project_name : "+it.getName()+" / id : "+it.getId()) + } } // if the image is part of a plate else { - WellWrapper well_wpr = image_wpr.getWells(user_client).get(0) - println ("Well_name : "+well_wpr.getName() +" / id : "+ well_wpr.getId()) - - def plate_wpr = image_wpr.getPlates(user_client).get(0) - println ("plate_name : "+plate_wpr.getName() + " / id : "+ plate_wpr.getId()) - - def screen_wpr = image_wpr.getScreens(user_client).get(0) - println ("screen_name : "+screen_wpr.getName() + " / id : "+ screen_wpr.getId()) + robustlyGetAll(image_wrp, "well", user_client).each{ + println ("Well_name : "+it.getName() +" / id : "+ it.getId()) + } + robustlyGetAll(image_wrp, "plate", user_client).each{ + println ("plate_name : "+it.getName() + " / id : "+ it.getId()) + } + robustlyGetAll(image_wrp, "screen", user_client).each{ + println ("screen_name : "+it.getName() + " / id : "+ it.getId()) + } } - ImagePlus imp = image_wpr.toImagePlus(user_client); + println "Getting image from OMERO" + + ImagePlus imp = robustlytoImagePlus(image_wrp, user_client) + // ImagePlus imp = IJ.openImage("/home/ldelisle/Desktop/EXP095_LE_PEG_CTGF_PLATE_120h.companion.ome [C2_1_merge].tif") if (!headless_mode) { imp.show() @@ -205,8 +539,13 @@ int nC = dim_array[2] int nT = 
dim_array[4] + // Get scale from omero + PixelsWrapper pixels = image_wrp.getPixels() + LengthI pixel_size = pixels.getPixelSizeX() + Double scale = pixel_size.getValue() + String scale_unit = pixel_size.getUnit().toString() + // Find the Greys channel: int ilastik_input_ch = 1 - // Find the Greys channel: ImageProcessor ip if (nC > 1) { for (int i = 1; i <= nC; i ++) { @@ -223,201 +562,418 @@ } } } - File output_path = new File (output_directory, image_basename+"_ilastik_" + ilastik_project_short_name + "_output.tif" ) - ImagePlus predictions_imp - FileSaver fs - if(output_path.exists()) { - println "USING EXISTING ILASTIK OUTPUT" - predictions_imp = IJ.openImage( output_path.toString() ) - } else { - /** - * ilastik - */ - println "Starting ilastik" - - // get ilastik predictions for each time point of the Time-lapse but all at the same time - ImagePlus ilastik_input_original = new Duplicator().run(imp, ilastik_input_ch, ilastik_input_ch, 1, 1, 1, nT); + // Define what will be defined in all cases: + double pixelWidth + ImagePlus mask_imp + List<ROIWrapper> updatedRoisW + List<Roi> updatedRois + // Or in both: + TableWrapper my_table + Boolean use_roi_name = true - ImagePlus gb_imp = ilastik_input_original.duplicate() - IJ.run(gb_imp, "Gaussian Blur...", "sigma=100 stack") - ImagePlus ilastik_input = ImageCalculator.run(ilastik_input_original, gb_imp, "Divide create 32-bit stack") - if (!headless_mode) {ilastik_input.show()} - // can't work without displaying image - // IJ.run("Run Pixel Classification Prediction", "projectfilename="+ilastik_project+" inputimage="+ilastik_input.getTitle()+" pixelclassificationtype=Probabilities"); - // - // to use in headless_mode more we need to use a commandservice - def predictions_imgPlus - if (ilastik_project_type == "Regular") { - predictions_imgPlus = cmds.run( IlastikPixelClassificationCommand.class, false, - 'inputImage', ilastik_input, - 'projectFileName', ilastik_project, - 'pixelClassificationType', "Probabilities").get().getOutput("predictions") - } else { - predictions_imgPlus = cmds.run( IlastikAutoContextCommand.class, false, - 'inputImage', ilastik_input, - 'projectFileName', ilastik_project, - 'AutocontextPredictionType', "Probabilities").get().getOutput("predictions") + if (use_existing || rescue) { + // get the list of image tables + // store the one with table_name + robustlyGetTables(image_wrp, user_client).each{ TableWrapper t_wrp -> + if (t_wrp.getName() == table_name){ + my_table = t_wrp + } + } + if (rescue && my_table == null) { + // We need to run the segmentation + use_existing = false + rescue = false + rois = robustlyGetROIs(image_wrp, user_client) + if (rois.size() > 0) { + // Clean existing ROIs + robustlyDeleteROIs(image_wrp, user_client, rois) + } + } else if (rescue && my_table != null) { + // We just need to generate a table + use_existing = true + get_spine = false + } else if ((!rescue) && my_table == null) { + throw new Exception("There is no table named " + table_name + " you need to rerun segmentation.") } - // to convert the result to ImagePlus : https://gist.github.com/GenevieveBuckley/460d0abc7c1b13eee983187b955330ba - predictions_imp = ImageJFunctions.wrap(predictions_imgPlus, "predictions") + } + if (!use_existing) { + // We compute the segmentation + if (segmentation_method == "ilastik") { + File output_path = new File (output_directory, image_basename+"_ilastik_" + ilastik_project_short_name + "_output.tif" ) + ImagePlus predictions_imp + FileSaver fs + if(output_path.exists()) { + println "USING EXISTING 
ILASTIK OUTPUT" + predictions_imp = IJ.openImage( output_path.toString() ) + } else { + /** + * ilastik + */ + println "Starting ilastik" - predictions_imp.setTitle("ilastik_output") + // get ilastik predictions for each time point of the Time-lapse but all at the same time + ImagePlus ilastik_input_original = new Duplicator().run(imp, ilastik_input_ch, ilastik_input_ch, 1, 1, 1, nT); - // save file - fs = new FileSaver(predictions_imp) - fs.saveAsTiff(output_path.toString() ) - } - if (!headless_mode) { predictions_imp.show() } + ImagePlus gb_imp = ilastik_input_original.duplicate() + IJ.run(gb_imp, "Gaussian Blur...", "sigma=100 stack") + ImagePlus ilastik_input = ImageCalculator.run(ilastik_input_original, gb_imp, "Divide create 32-bit stack") + if (!headless_mode) {ilastik_input.show()} + // can't work without displaying image + // IJ.run("Run Pixel Classification Prediction", "projectfilename="+ilastik_project+" inputimage="+ilastik_input.getTitle()+" pixelclassificationtype=Probabilities"); + // + // to use in headless_mode more we need to use a commandservice + def predictions_imgPlus + if (ilastik_project_type == "Regular") { + predictions_imgPlus = cmds.run( IlastikPixelClassificationCommand.class, false, + 'inputImage', ilastik_input, + 'projectFileName', ilastik_project, + 'pixelClassificationType', "Probabilities").get().getOutput("predictions") + } else { + predictions_imgPlus = cmds.run( IlastikAutoContextCommand.class, false, + 'inputImage', ilastik_input, + 'projectFileName', ilastik_project, + 'AutocontextPredictionType', "Probabilities").get().getOutput("predictions") + } + // to convert the result to ImagePlus : https://gist.github.com/GenevieveBuckley/460d0abc7c1b13eee983187b955330ba + predictions_imp = ImageJFunctions.wrap(predictions_imgPlus, "predictions") + + predictions_imp.setTitle("ilastik_output") - /** - * From the "ilastik predictions of the Time-lapse" do segmentation and cleaning - */ - // Get only the channel for the gastruloid/background prediction - ImagePlus mask_imp = new Duplicator().run(predictions_imp, ilastik_label_OI, ilastik_label_OI, 1, 1, 1, nT); - // This title will appear in the result table - mask_imp.setTitle(image_basename) - // Apply threshold: - IJ.setThreshold(mask_imp, probability_threshold, 100.0000); - Prefs.blackBackground = true; - IJ.run(mask_imp, "Convert to Mask", "method=Default background=Dark black"); - if (!headless_mode) { mask_imp.show() } + // save file + fs = new FileSaver(predictions_imp) + fs.saveAsTiff(output_path.toString() ) + } + if (!headless_mode) { predictions_imp.show() } - // clean the mask a bit - // Before we were doing: - // IJ.run(mask_ilastik_imp, "Options...", "iterations=10 count=3 black do=Open") - // Now: - // (Romain proposed 5 as radius_median) - println "Smoothing mask" - - // Here I need to check if we first fill holes or first do the median - IJ.run(mask_imp, "Median...", "radius=" + radius_median + " stack"); + /** + * From the "ilastik predictions of the Time-lapse" do segmentation and cleaning + */ - IJ.run(mask_imp, "Fill Holes", "stack"); + // Get a stack of ROI for background: + if (ilastik_label_BG != 0) { + ImagePlus mask_imp_BG = new Duplicator().run(predictions_imp, ilastik_label_BG, ilastik_label_BG, 1, 1, 1, nT) + // Apply threshold: + IJ.setThreshold(mask_imp_BG, probability_threshold_BG, 100.0000) + Prefs.blackBackground = true + IJ.run(mask_imp_BG, "Convert to Mask", "method=Default background=Dark black") + if (!headless_mode) { mask_imp_BG.show() } + IJ.run(mask_imp_BG, "Analyze 
Particles...", "stack show=Overlay") + Overlay ov_BG = mask_imp_BG.getOverlay() + Overlay ov_BG_Combined = new Overlay() + for (int t=1;t<=nT;t++) { + // Don't ask me why we need to refer to Z pos and not T/Frame + ArrayList<Roi> all_rois_inT = ov_BG.findAll{ roi -> roi.getZPosition() == t} + println "There are " + all_rois_inT.size() + " in time " + t + if (all_rois_inT.size() > 0) { + ShapeRoi current_roi = new ShapeRoi(all_rois_inT[0] as Roi) + for (i = 1; i < all_rois_inT.size(); i++) { + current_roi = current_roi.or(new ShapeRoi(all_rois_inT[i] as Roi)) + } + // Update the position before adding to the ov_BG_Combined + current_roi.setPosition( ilastik_input_ch, 1, t) + current_roi.setName("Background_t" + t) + ov_BG_Combined.add(current_roi) + } + } + IJ.run("Clear Results") + println "Store " + ov_BG_Combined.size() + " BG ROIs on OMERO" + // Save ROIs to omero + robustlysaveROIs(image_wrp, user_client, ROIWrapper.fromImageJ(ov_BG_Combined as List)) + } - // Get scale from omero - PixelsWrapper pixels = image_wpr.getPixels() - LengthI pixel_size = pixels.getPixelSizeX() - Double scale = pixel_size.getValue() - String scale_unit = pixel_size.getUnit().toString() - - // find gastruloids and measure them + // Get only the channel for the gastruloid/background prediction + mask_imp = new Duplicator().run(predictions_imp, ilastik_label_OI, ilastik_label_OI, 1, 1, 1, nT); - IJ.run("Set Measurements...", "area feret's perimeter shape display redirect=None decimal=3") - IJ.run("Set Scale...", "distance=1 known=" + scale + " unit=micron") - // Exclude the edge - IJ.run(mask_imp, "Analyze Particles...", "size=" + min_size_particle + "-Infinity stack exclude show=Overlay"); + // Apply threshold: + IJ.setThreshold(mask_imp, probability_threshold, 100.0000); + Prefs.blackBackground = true; + IJ.run(mask_imp, "Convert to Mask", "method=Default background=Dark black"); - println "Found " + rt.size() + " ROIs" + } else { + // Get only the channel with bright field + mask_imp = new Duplicator().run(imp, ilastik_input_ch, ilastik_input_ch, 1, 1, 1, nT); + // Run convert to mask + (new Thresholder()).convertStackToBinary(mask_imp); + } + // This title will appear in the result table + mask_imp.setTitle(image_basename) + if (!headless_mode) { mask_imp.show() } - // make a "clean" mask - newMask_imp = IJ.createImage("CleanMask", "8-bit black", imp.getWidth(), imp.getHeight(), nT); - if (nT > 1) { - HyperStackConverter.toHyperStack(newMask_imp, 1, 1, nT, "xyctz", "Color"); - } - if (!headless_mode) {newMask_imp.show()} + // clean the mask a bit + // Before we were doing: + // IJ.run(mask_ilastik_imp, "Options...", "iterations=10 count=3 black do=Open") + // Now: + // (Romain proposed 5 as radius_median) + println "Smoothing mask" + + // Here I need to check if we first fill holes or first do the median + IJ.run(mask_imp, "Median...", "radius=" + radius_median + " stack"); + + IJ.run(mask_imp, "Fill Holes", "stack"); + + // find gastruloids and measure them + + IJ.run("Set Measurements...", "area feret's perimeter shape display redirect=None decimal=3") + IJ.run("Set Scale...", "distance=1 known=" + scale + " unit=micron") + pixelWidth = mask_imp.getCalibration().pixelWidth + println "pixelWidth is " + pixelWidth + // Exclude the edge + IJ.run(mask_imp, "Analyze Particles...", "size=" + min_size_particle + "-Infinity stack exclude show=Overlay"); + + println "Found " + rt.size() + " ROIs" - Overlay ov = mask_imp.getOverlay() - // Let's keep only the largest area for each time: - Overlay clean_overlay = new 
Overlay() - Roi largest_roi_inT - for (int t=1;t<=nT;t++) { - // Don't ask me why we need to refer to Z pos and not T/Frame - ArrayList<Roi> all_rois_inT = ov.findAll{ roi -> roi.getZPosition() == t} - println "There are " + all_rois_inT.size() + " in time " + t - if (all_rois_inT.size() == 0) { - // We arbitrary design a ROI of size 1x1 - largest_roi_inT = new Roi(0,0,1,1) - largest_roi_inT.setName("GastruloidNotFound_t" + t) + Overlay ov = mask_imp.getOverlay() + // We store in clean_overlay all gastruloids to measure + // They must have names and appropriate position + Overlay clean_overlay = new Overlay() + if (keep_only_largest) { + // Let's keep only the largest area for each time: + Roi largest_roi_inT + for (int t=1;t<=nT;t++) { + // Don't ask me why we need to refer to Z pos and not T/Frame + ArrayList<Roi> all_rois_inT = ov.findAll{ roi -> roi.getZPosition() == t} + println "There are " + all_rois_inT.size() + " in time " + t + if (all_rois_inT.size() > 0) { + largest_roi_inT = Collections.max(all_rois_inT, Comparator.comparing((roi) -> roi.getStatistics().area )) + largest_roi_inT.setName("Gastruloid_t" + t + "_id1") + } else { + // We arbitrary design a ROI of size 1x1 + largest_roi_inT = new Roi(0,0,1,1) + largest_roi_inT.setName("GastruloidNotFound_t" + t) + } + // Update the position before adding to the clean_overlay + largest_roi_inT.setPosition( ilastik_input_ch, 1, t) + clean_overlay.add(largest_roi_inT) + } } else { - largest_roi_inT = Collections.max(all_rois_inT, Comparator.comparing((roi) -> roi.getStatistics().area )) - largest_roi_inT.setName("Gastruloid_t" + t) + // We keep all + // We store the last number given: + int[] lastID = new int[nT] + ov.each{ Roi roi -> + // Don't ask me why we need to refer to Z pos and not T/Frame + t = roi.getZPosition() + id = lastID[t - 1] + 1 + roi.setName("Gastruloid_t" + t + "_id" + id) + // Increase lastID: + lastID[t - 1] += 1 + // Update the position before adding to the clean_overlay + roi.setPosition( ilastik_input_ch, 1, t) + clean_overlay.add(roi) + } + // Fill timepoints with no ROI with notfound: + Roi roi + for (int t=1;t<=nT;t++) { + if (lastID[t - 1] == 0) { + // We arbitrary design a ROI of size 1x1 + roi = new Roi(0,0,1,1) + roi.setName("GastruloidNotFound_t" + t) + // Update the position before adding to the clean_overlay + roi.setPosition( ilastik_input_ch, 1, t) + clean_overlay.add(roi) + } + + } } - // Fill the frame t with the largest_roi_inT - newMask_imp.setT(t) - Overlay t_ov = new Overlay(largest_roi_inT) - t_ov.fill(newMask_imp, Color.white, Color.black) - // Update the position before adding to the clean_overlay - largest_roi_inT.setPosition( ilastik_input_ch, 1, t) - clean_overlay.add(largest_roi_inT) - } + // Measure this new overlay: + rt = clean_overlay.measure(imp) + + // Get Date + Date date = new Date() + String now = date.format("yyyy-MM-dd_HH-mm") - // Measure this new overlay: - rt = clean_overlay.measure(imp) + // Add Date, version and params + for ( int row = 0;row<rt.size();row++) { + rt.setValue("Unit", row, scale_unit) + rt.setValue("Date", row, now) + rt.setValue("Version", row, tool_version) + if (segmentation_method == "ilastik") { + rt.setValue("IlastikProject", row, ilastik_project_short_name) + rt.setValue("ProbabilityThreshold", row, probability_threshold) + } else { + rt.setValue("IlastikProject", row, "NA") + rt.setValue("ProbabilityThreshold", row, "NA") + } + rt.setValue("MinSizeParticle", row, min_size_particle) + rt.setValue("MinDiameter", row, minimum_diameter) + 
rt.setValue("ClosenessTolerance", row, closeness_tolerance) + rt.setValue("MinSimilarity", row, min_similarity) + rt.setValue("RadiusMedian", row, radius_median) + String label = rt.getLabel(row) + rt.setValue("BaseImage", row, label.split(":")[0]) + rt.setValue("ROI", row, label.split(":")[1]) + // In simple-omero-client + // Strings that can be converted to double are stored in double + // in omero so to create the super_table we need to store all + // them as Double: + rt.setValue("Time", row, label.split(":")[1].split("_t")[-1].split("_id")[0] as Double) + rt.setValue("ROI_type", row, label.split(":")[1].split("_t")[0]) + Roi current_roi = clean_overlay[row] + Double[] centroid = current_roi.getContourCentroid() + rt.setValue("XCentroid", row, centroid[0]) + rt.setValue("YCentroid", row, centroid[1]) + assert label.split(":")[1] == current_roi.getName() : "Name in ov does not match name in rt"; + } + println "Store " + clean_overlay.size() + " ROIs on OMERO" + // Save ROIs to omero + robustlysaveROIs(image_wrp, user_client, ROIWrapper.fromImageJ(clean_overlay as List)) - assert rt.size() == nT: "Was expecting as many entry as time points" - - // Get Date - Date date = new Date() - String now = date.format("yyyy-MM-dd_HH-mm") + // Get them back with IDs: + updatedRoisW = robustlyGetROIs(image_wrp, user_client) + updatedRois = ROIWrapper.toImageJ(updatedRoisW, "ROI") + } else { + // reinitialize the rt + rt = new ResultsTable() + // The first column (index 0) of the result table is the image ID + // The second column (index 1) is the ROI ID + // Add all others values + for (icol = 2; icol < my_table.getColumnCount(); icol ++) { + colname = my_table.getColumnName(icol) + for (row = 0; row < my_table.getRowCount(); row ++) { + rt.setValue(colname, row, my_table.getData(row, icol)) + } + } + // Add ROI column + use_roi_name = false + for ( int row = 0;row<rt.size();row++) { + rt.setValue("ROI", row, my_table.getData(row, 1).getId()) + } + if (!rescue) { + // Get the ROI ids associated with the measures of the table + Long[] gastruloid_roi_ids = (my_table.getData()[1]).collect{ + it.getId() + } + // Sort the array: + Arrays.sort(gastruloid_roi_ids) + // Get Date + Date date = new Date() + String now = date.format("yyyy-MM-dd_HH-mm") - // Add Date, version and params - for ( int row = 0;row<rt.size();row++) { - rt.setValue("Unit", row, scale_unit) - rt.setValue("Date", row, now) - rt.setValue("Version", row, tool_version) - rt.setValue("IlastikProject", row, ilastik_project_short_name) - rt.setValue("ProbabilityThreshold", row, probability_threshold) - rt.setValue("MinSizeParticle", row, min_size_particle) - rt.setValue("MinDiameter", row, minimum_diameter) - rt.setValue("ClosenessTolerance", row, closeness_tolerance) - rt.setValue("MinSimilarity", row, min_similarity) - rt.setValue("RadiusMedian", row, radius_median) - String label = rt.getLabel(row) - rt.setValue("BaseImage", row, label.split(":")[0]) - rt.setValue("ROI", row, label.split(":")[1]) - // In simple-omero-client - // Strings that can be converted to double are stored in double - // in omero so to create the super_table we need to store all - // them as Double: - rt.setValue("Time", row, label.split(":")[1].split("_t")[-1] as Double) - rt.setValue("ROI_type", row, label.split(":")[1].split("_t")[0]) + // Add Date, version and params + for ( int row = 0;row<rt.size();row++) { + rt.setValue("Date_rerun_spine", row, now) + rt.setValue("Version_rerun_spine", row, tool_version) + rt.setValue("MinDiameter", row, minimum_diameter) + 
rt.setValue("ClosenessTolerance", row, closeness_tolerance) + rt.setValue("MinSimilarity", row, min_similarity) + } + // Remove any roi which is not gastruloid: + println "Remove ROIs other than segmentation results and tables" + // In order to reduce the number of 'servantsPerSession' + // Which reached 10k and then caused failure + // I store them in a list + ArrayList<ROIWrapper> ROIW_list_to_delete = [] + robustlyGetROIs(image_wrp, user_client).each{ + if (Arrays.binarySearch(gastruloid_roi_ids, it.getId()) < 0) { + // user_client.delete(it) + String roi_name = it.toImageJ().get(0).getName() + if (!roi_name.startsWith("Background_t")) { + ROIW_list_to_delete.add(it) + } + } + } + // Then I should use + // user_client.delete(ROIW_list_to_delete) + // Because of https://github.com/GReD-Clermont/simple-omero-client/issues/59 + // I use + if (ROIW_list_to_delete.size() > 0) { + robustlyDeleteROIs(image_wrp, user_client, ROIW_list_to_delete) + } + robustlyDeleteTables(image_wrp, user_client) + + // Retrieve the ROIs from omero: + updatedRoisW = robustlyGetROIs(image_wrp, user_client) + updatedRois = ROIWrapper.toImageJ(updatedRoisW, "ROI") + // Create a clean mask + mask_imp = IJ.createImage("CleanMask", "8-bit black", imp.getWidth(), imp.getHeight(), nT); + if (nT > 1) { + HyperStackConverter.toHyperStack(mask_imp, 1, 1, nT, "xyctz", "Color"); + } + if (!headless_mode) {mask_imp.show()} + for (roi in updatedRois) { + t = roi.getTPosition() + Overlay t_ov = new Overlay(roi) + // Fill the frame t with the roi + mask_imp.setT(t) + t_ov.fill(mask_imp, Color.white, Color.black) + } + IJ.run("Set Scale...", "distance=1 known=" + scale + " unit=micron") + pixelWidth = mask_imp.getCalibration().pixelWidth + println "pixelWidth is " + pixelWidth + + } else { + // Retrieve the ROIs from omero: + updatedRoisW = robustlyGetROIs(image_wrp, user_client) + updatedRois = ROIWrapper.toImageJ(updatedRoisW, "ROI") + } } - println "Remove existing ROIs on OMERO" - // Remove existing ROIs - image_wpr.getROIs(user_client).each{ user_client.delete(it) } - println "Store " + clean_overlay.size() + " ROIs on OMERO" - // Save ROIs to omero - image_wpr.saveROIs(user_client, ROIWrapper.fromImageJ(clean_overlay as List)) - - // Get them back with IDs: - List<Roi> updatedRois = ROIWrapper.toImageJ(image_wpr.getROIs(user_client), "ROI") if (get_spine) { - /** - * The MaxInscribedCircles magic is here - */ - isSelectionOnly = false - isGetSpine = true - appendPositionToName = true - MaxInscribedCircles mic = MaxInscribedCircles.builder(newMask_imp) - .minimumDiameter(minimum_diameter) - .useSelectionOnly(isSelectionOnly) - .getSpine(isGetSpine) - .spineClosenessTolerance(closeness_tolerance) - .spineMinimumSimilarity(min_similarity) - .appendPositionToName(appendPositionToName) - .build() - println "Get spines" - mic.process() - List<Roi> all_circles = mic.getCircles(); - List<Roi> all_spines = mic.getSpines(); + // println use_roi_name + // Scan ROIs and + // Put them in HashMap + Map<String, Roi> gastruloid_rois = new HashMap<>() + for (roi_i = 0; roi_i < updatedRois.size(); roi_i ++) { + Roi roi = updatedRois[roi_i] + roi_name = roi.getName() + if (roi_name.toLowerCase().startsWith("gastruloid") && !roi_name.toLowerCase().startsWith("gastruloidnotfound")) { + // println "Putting " + roi_name + " in table." 
+ if (use_roi_name) { + assert !gastruloid_rois.containsKey(roi_name); "Duplicated gastruloid ROI name" + gastruloid_rois.put(roi_name, roi) + } else { + // println "ID is: " + updatedRoisW[roi_i].getId() + gastruloid_rois.put(updatedRoisW[roi_i].getId(), roi) + } + } + } + for (int row = 0 ; row < rt.size();row++) { + String roi_type = rt.getStringValue("ROI_type", row) + if (roi_type == "Gastruloid") { + // Find the corresponding ROI + String roi_name + Roi current_roi + if (use_roi_name) { + roi_name = rt.getStringValue("ROI", row) + current_roi = gastruloid_rois.get(roi_name) + } else { + Long roi_id = rt.getValue("ROI", row) as int + current_roi = gastruloid_rois.get(roi_id) + roi_name = current_roi.getName() + } + println roi_name - /** - * For each Time-point, find the : - * - the largest cicle - * - the spine, and the coordinates of end-points - * Measure distances and inverses spine roi if necessary - * Add value to table with Elongation Index - */ - double pixelWidth = mask_imp.getCalibration().pixelWidth + assert current_roi != null; "The ROI of row " + row + "is not on OMERO" + t = current_roi.getTPosition() + assert t == rt.getValue("Time", row) as int; "T position does not match Time in rt" + /** + * The MaxInscribedCircles magic is here + */ + + ImagePlus mask_imp_single = new Duplicator().run(mask_imp, 1, 1, 1, 1, t, t) + mask_imp_single.setRoi(current_roi) + + isSelectionOnly = true + isGetSpine = true + appendPositionToName = false + MaxInscribedCircles mic = MaxInscribedCircles.builder(mask_imp_single) + .minimumDiameter(minimum_diameter) + .useSelectionOnly(isSelectionOnly) + .getSpine(isGetSpine) + .spineClosenessTolerance(closeness_tolerance) + .spineMinimumSimilarity(min_similarity) + .appendPositionToName(appendPositionToName) + .build() + println "Get spines" + mic.process() + List<Roi> circles_t = mic.getCircles() + Roi spine_roi = mic.getSpines()[0] - for (int row = 0 ; row < rt.size();row++) { - - int t = rt.getValue("Time", row) as int - println "#############"+t - String roi_type = rt.getStringValue("ROI_type", row) - - if (roi_type == "Gastruloid") { - List<Roi> circles_t = all_circles.findAll{ roi -> roi.getName().endsWith("P_"+t)} + /** + * For each Time-point, find the : + * - the largest cicle + * - the spine, and the coordinates of end-points + * Measure distances and inverses spine roi if necessary + * Add value to table with Elongation Index + */ if (circles_t.size() > 0) { Roi largestCircle_roi = circles_t[0] @@ -430,22 +986,25 @@ ArrayList<Roi> rois_to_add_to_omero rt.setValue("LargestRadius", row, circle_roi_radius * pixelWidth) if (debug) { - circles_t.each{it.setPosition(ilastik_input_ch, 1, t)} + circles_t.each{ + it.setPosition(ilastik_input_ch, 1, t) + it.setName(it.getName() + "_" + roi_name) + } // First put all circles to omero: - image_wpr.saveROIs(user_client, ROIWrapper.fromImageJ(circles_t as List)) + robustlysaveROIs(image_wrp, user_client, ROIWrapper.fromImageJ(circles_t as List)) if (!headless_mode) { (circles_t as List).each{ rm.addRoi(it)} } } else { // First put the largest circle to omero: - image_wpr.saveROIs(user_client, ROIWrapper.fromImageJ([largestCircle_roi] as List)) + largestCircle_roi.setName(largestCircle_roi.getName() + "_" + roi_name) + robustlysaveROIs(image_wrp, user_client, ROIWrapper.fromImageJ([largestCircle_roi] as List)) if (!headless_mode) { rm.addRoi(largestCircle_roi) } } // get the Spine, and its points - Roi spine_roi = all_spines.findAll{ roi -> roi.getName().endsWith("P_"+t)}[0] println "Spine is " + 
spine_roi if (spine_roi != null){ //println spine_roi @@ -475,7 +1034,8 @@ rt.setValue("SpineLength", row, line_roi_length * pixelWidth) rt.setValue("ElongationIndex", row, line_roi_length / (2*circle_roi_radius)) spine_roi.setPosition( ilastik_input_ch, 1, t) - image_wpr.saveROIs(user_client, ROIWrapper.fromImageJ([spine_roi] as List)) + spine_roi.setName(spine_roi.getName() + "_" + roi_name) + robustlysaveROIs(image_wrp, user_client, ROIWrapper.fromImageJ([spine_roi] as List)) if (!headless_mode) { rm.addRoi(spine_roi) } @@ -495,29 +1055,23 @@ } } } - // get the list of image tables - // remove the one with table_name - image_wpr.getTables(user_client).each{ TableWrapper t_wpr -> - if (t_wpr.getName() == table_name){ - user_client.delete(t_wpr) - } - } // Create an omero table: println "Create an omero table" - TableWrapper table_wpr = new TableWrapper(user_client, rt, image_wpr.getId(), updatedRois, "ROI") + TableWrapper table_wrp = robustlyNewTableWrapper(user_client, rt, image_wrp.getId(), updatedRois, "ROI") - // upload the table on OMERO - table_wpr.setName(table_name) - image_wpr.addTable(user_client, table_wpr) - + if (!rescue) { + // upload the table on OMERO + table_wrp.setName(table_name) + robustlyAddAndReplaceTable(image_wrp, user_client, table_wrp) + } // add the same infos to the super_table if (super_table == null) { println "super_table is null" - super_table = table_wpr + super_table = table_wrp } else { println "adding rows" - super_table.addRows(user_client, rt, image_wpr.getId(), updatedRois, "ROI") + robustlyAddRows(super_table, user_client, rt, image_wrp.getId(), updatedRois, "ROI") } println super_table.getRowCount() println "Writting measurements to file" @@ -525,7 +1079,7 @@ // Put all ROIs in overlay: Overlay global_overlay = new Overlay() - ROIWrapper.toImageJ(image_wpr.getROIs(user_client), "ROI").each{ + ROIWrapper.toImageJ(robustlyGetROIs(image_wrp, user_client), "ROI").each{ global_overlay.add(it) } @@ -542,7 +1096,7 @@ // In simple-omero-client // Strings that can be converted to double are stored in double // In order to build the super_table, tool_version should stay String -String tool_version = "Phase_v20230324" +String tool_version = "White_v20231220" // User set variables @@ -556,6 +1110,9 @@ #@ Long(label="ID", value=119273) id #@ String(visibility=MESSAGE, value="Parameters for segmentation/ROI", required=false) msg2 +#@ Boolean(label="Use existing segmentation (values below in the section will be ignored)") use_existing +#@ String(label="Segmentation Method", choices={"convert_to_mask","ilastik"}) segmentation_method +#@ Boolean(label="<html>Run in rescue mode<br/>(only segment images without tables)</html>", value=false) rescue #@ File(label="Ilastik project") ilastik_project #@ String(label="Ilastik project short name") ilastik_project_short_name #@ String(label="Ilastik project type", choices={"Regular", "Auto-context"}, value="Regular") ilastik_project_type @@ -563,17 +1120,31 @@ #@ Double(label="Probability threshold for ilastik", min=0, max=1, value=0.65) probability_threshold #@ Double(label="Radius for median (=smooth the mask)", min=1, value=20) radius_median #@ Integer(label="Minimum surface for Analyze Particle", value=5000) min_size_particle +#@ Boolean(label="Keep only one gastruloid per timepoint", value=true) keep_only_largest -#@ String(visibility=MESSAGE, value="Parameters for elongation index", required=false) msg3 +#@ String(visibility=MESSAGE, value="Parameters for segmentation/ROI of background", required=false) msg3 +#@ 
Integer(label="Ilastik label of background (put 0 if not present)", min=0, value=1) ilastik_label_BG +#@ Double(label="Probability threshold for background in ilastik", min=0, max=1, value=0.8) probability_threshold_BG + +#@ String(visibility=MESSAGE, value="Parameters for elongation index", required=false) msg4 #@ Boolean(label="Compute spine", value=true) get_spine #@ Integer(label="Minimum diameter of inscribed circles", min=0, value=20) minimum_diameter #@ Integer(label="Closeness Tolerance (Spine)", min=0, value=50) closeness_tolerance #@ Double(label="Min similarity (Spine)", min=-1, max=1, value=0.1) min_similarity -#@ String(visibility=MESSAGE, value="Parameters for output", required=false) msg4 +#@ String(visibility=MESSAGE, value="Parameters for output", required=false) msg5 #@ File(style = "directory", label="Directory where measures are put") output_directory #@ Boolean(label="<html>Run in debug mode<br/>(get all inscribed circles)</html>", value=false) debug +// Handle incompatibilities: +if (rescue && use_existing) { + throw new Exception("rescue and use_existing modes are incompatible") +} +if (use_existing && !get_spine) { + throw new Exception("use_existing mode requires get_spine") +} + + #@ ResultsTable rt #@ CommandService cmds #@ ConvertService cvts @@ -595,7 +1166,9 @@ rm = rm.getRoiManager() rm.reset() // Reset the table - rt.reset() + if (rt != null) { + rt.reset() + } } if (PASSWORD == "") { @@ -627,27 +1200,45 @@ switch (object_type) { case "image": - ImageWrapper image_wr - try { - image_wpr = user_client.getImage(id) - } catch(Exception e) { - throw Exception("Could not retrieve the image, please check the id.") + ImageWrapper image_wrp = robustlyGetOne(id, "image", user_client) + if (!use_existing) { + List<TableWrapper> tables = robustlyGetTables(image_wrp, user_client) + if (!tables.isEmpty()) { + throw new Exception("There should be no table associated to the image before segmentation. Please clean the image.") + } + if (!rescue) { + List<ROIWrapper> rois = robustlyGetROIs(image_wrp, user_client) + if (!rois.isEmpty()) { + throw new Exception("There should be no ROIs associated to the image before segmentation. 
Please clean the image.") + } + } } - processImage(user_client, image_wpr, - ilastik_project, ilastik_project_type, - ilastik_label_OI, - probability_threshold, radius_median, min_size_particle, - get_spine, minimum_diameter, closeness_tolerance, min_similarity, - ilastik_project_short_name, - output_directory, - headless_mode, debug, tool_version) + processImage(user_client, image_wrp, + ilastik_project, ilastik_project_type, + ilastik_label_OI, + probability_threshold, radius_median, min_size_particle, + get_spine, minimum_diameter, closeness_tolerance, min_similarity, + ilastik_project_short_name, + output_directory, + headless_mode, debug, tool_version, + use_existing, "image", rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) break case "dataset": - DatasetWrapper dataset_wrp - try { - dataset_wrp = user_client.getDataset(id) - } catch(Exception e) { - throw Exception("Could not retrieve the dataset, please check the id.") + DatasetWrapper dataset_wrp = robustlyGetOne(id, "dataset", user_client) + if (use_existing) { + // Remove the tables associated to the dataset + robustlyDeleteTables(dataset_wrp, user_client) + } else if (rescue) { + List<TableWrapper> tables = robustlyGetTables(dataset_wrp, user_client) + if (!tables.isEmpty()) { + throw new Exception("There should be no table associated to the dataset before running rescue mode.") + } + } else { + if (robustlyHasAnyTable(dataset_wrp, "dataset", user_client) || robustlyHasAnyROI(dataset_wrp, user_client)) { + throw new Exception("ROI or table found in dataset or images. They should be deleted before running analysis.") + } } processDataset(user_client, dataset_wrp, ilastik_project, ilastik_project_type, @@ -656,24 +1247,28 @@ get_spine, minimum_diameter, closeness_tolerance, min_similarity, ilastik_project_short_name, output_directory, - headless_mode, debug, tool_version) - // get the list of dataset tables - // remove the one with table_name - dataset_wrp.getTables(user_client).each{ TableWrapper t_wpr -> - if (t_wpr.getName() == table_name + "_global"){ - user_client.delete(t_wpr) - } - } + headless_mode, debug, tool_version, + use_existing, "dataset", rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) // upload the table on OMERO super_table.setName(table_name + "_global") - dataset_wrp.addTable(user_client, super_table) + robustlyAddAndReplaceTable(dataset_wrp, user_client, super_table) break case "well": - WellWrapper well_wrp - try { - well_wrp = user_client.getWells(id)[0] - } catch(Exception e) { - throw Exception("Could not retrieve the well, please check the id.") + WellWrapper well_wrp = robustlyGetOne(id, "well", user_client) + if (use_existing) { + // Remove the tables associated to the well + robustlyDeleteTables(well_wrp, user_client) + } else if (rescue) { + List<TableWrapper> tables = robustlyGetTables(well_wrp, user_client) + if (!tables.isEmpty()) { + throw new Exception("There should be no table associated to the well before running rescue mode.") + } + } else { + if (robustlyHasAnyTable(well_wrp, "well", user_client) || robustlyHasAnyROI(well_wrp, user_client)) { + throw new Exception("ROI or table found in well or images. 
They should be deleted before running analysis.") + } } processSingleWell(user_client, well_wrp, ilastik_project, ilastik_project_type, @@ -682,24 +1277,28 @@ get_spine, minimum_diameter, closeness_tolerance, min_similarity, ilastik_project_short_name, output_directory, - headless_mode, debug, tool_version) - // get the list of well tables - // remove the one with table_name - well_wrp.getTables(user_client).each{ TableWrapper t_wpr -> - if (t_wpr.getName() == table_name + "_global"){ - user_client.delete(t_wpr) - } - } + headless_mode, debug, tool_version, + use_existing, "well", rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) // upload the table on OMERO super_table.setName(table_name + "_global") - well_wrp.addTable(user_client, super_table) + robustlyAddAndReplaceTable(well_wrp, user_client, super_table) break case "plate": - PlateWrapper plate_wrp - try { - plate_wrp = user_client.getPlates(id)[0] - } catch(Exception e) { - throw Exception("Could not retrieve the plate, please check the id.") + PlateWrapper plate_wrp = robustlyGetOne(id, "plate", user_client) + if (use_existing) { + // Remove the tables associated to the plate + robustlyDeleteTables(plate_wrp, user_client) + } else if (rescue) { + List<TableWrapper> tables = robustlyGetTables(plate_wrp, user_client) + if (!tables.isEmpty()) { + throw new Exception("There should be no table associated to the plate before running rescue mode.") + } + } else { + if (robustlyHasAnyTable(plate_wrp, "plate", user_client) || robustlyHasAnyROI(plate_wrp, user_client)) { + throw new Exception("ROI or table found in plate or images. They should be deleted before running analysis.") + } } processSinglePlate(user_client, plate_wrp, ilastik_project, ilastik_project_type, @@ -708,22 +1307,19 @@ get_spine, minimum_diameter, closeness_tolerance, min_similarity, ilastik_project_short_name, output_directory, - headless_mode, debug, tool_version) - // get the list of well tables - // remove the one with table_name - plate_wrp.getTables(user_client).each{ TableWrapper t_wpr -> - if (t_wpr.getName() == table_name + "_global"){ - user_client.delete(t_wpr) - } - } + headless_mode, debug, tool_version, + use_existing, "plate", rescue, + ilastik_label_BG, probability_threshold_BG, + keep_only_largest, segmentation_method) // upload the table on OMERO super_table.setName(table_name + "_global") - plate_wrp.addTable(user_client, super_table) + robustlyAddAndReplaceTable(plate_wrp, user_client, super_table) break } } catch(Exception e) { println("Something went wrong: " + e) + e.printStackTrace() if (headless_mode){ // This is due to Rank Filter + GaussianBlur @@ -739,7 +1335,7 @@ } } else { - println "Not able to connect to " + host + throw new Exception("Not able to connect to " + host) } return
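The bulk of the changes above wrap every OMERO call (getting images, ROIs and tables, saving ROIs, creating tables) in a retry helper that waits 0, 10, 60, 360 and then 600 minutes between attempts and reconnects if the session was dropped. Below is a minimal Groovy sketch of that pattern with the repeated code factored into one closure-based helper; the name "robustly" is illustrative and not part of the committed script, and host, port, USERNAME and PASSWORD are assumed to be the script-level variables used above.

import java.util.concurrent.TimeUnit

import fr.igred.omero.Client

// Generic retry wrapper: run `action` and, on failure, wait longer and longer
// before retrying, reconnecting to OMERO if the session was dropped.
def robustly(Client user_client, String description, Closure action) {
    def waiting_times = [0, 10, 60, 360, 600]  // minutes, as in the script above
    Exception last_exception = null
    for (waiting_time in waiting_times) {
        try {
            return action()
        } catch (Exception e) {
            println("Could not " + description + ", waiting " + waiting_time + " minutes and trying again.")
            println e
            last_exception = e
            TimeUnit.MINUTES.sleep(waiting_time)
            if (!user_client.isConnected()) {
                // host, port, USERNAME and PASSWORD are the script-level variables
                user_client.connect(host, port, USERNAME, PASSWORD.toCharArray())
            }
        }
    }
    throw last_exception
}

// Example use, mirroring robustlyGetROIs above:
// def rois = robustly(user_client, "get ROIs for " + image_wrp) { image_wrp.getROIs(user_client) }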
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/README.md	Wed Dec 20 20:07:13 2023 +0000
@@ -0,0 +1,35 @@
+# OMERO hyperstack to gastruloid measurements
+
+## CHANGELOG
+
+### 20231220
+
+- Add a new parameter: segmentation_method which can be 'ilastik' or 'convert_to_mask'. If 'convert_to_mask' is chosen, it does an autothreshold.
+- The tool_version has been changed from Phase to White
+
+### 20230728
+
+- Add a new parameter: keep_only_largest which allows to keep only the largest ROI for each stack
+
+### 20230727
+
+- Add new parameters (ilastik_label_BG and probability_threshold_BG) to be able to generate a ROI for background.
+- Add XCentroid and YCentroid to the result table
+
+### 20230628
+
+- Change RoiWrapper to ROIWrapper
+
+### 20230623
+
+- Be more robust to OMERO reboot:
+  - 'rescue' allows to only process images which does not have ROIs and tables and generate final table
+  - When making a query to omero repeat it after 0 minutes if it fails and again with 10, 60, 360, 600.
+
+### 20230405
+
+- New parameter 'use_existing' allows to recompute only the spine
+
+### 20230324
+
+First release
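For the keep_only_largest parameter introduced in the 20230728 entry, the script keeps, for every timepoint, the single ROI with the largest area from the Analyze Particles overlay, and inserts a 1x1 placeholder ROI when nothing was found. The following trimmed-down Groovy/ImageJ sketch shows that selection under the same naming convention as the script; the helper name keepLargestPerTimepoint is illustrative, and ov, nT and channel stand for the overlay, frame count and channel index used above.

import ij.gui.Overlay
import ij.gui.Roi

// Keep only the largest ROI per timepoint; timepoints without any ROI get a
// 1x1 placeholder named "GastruloidNotFound_t<t>", as in the script above.
Overlay keepLargestPerTimepoint(Overlay ov, int nT, int channel) {
    Overlay clean_overlay = new Overlay()
    for (int t = 1; t <= nT; t++) {
        // The particle ROIs carry the frame index in their Z position
        List<Roi> rois_inT = ov.findAll { roi -> roi.getZPosition() == t }
        Roi kept
        if (rois_inT.size() > 0) {
            kept = Collections.max(rois_inT, Comparator.comparing { roi -> roi.getStatistics().area })
            kept.setName("Gastruloid_t" + t + "_id1")
        } else {
            kept = new Roi(0, 0, 1, 1)
            kept.setName("GastruloidNotFound_t" + t)
        }
        // Update the position (channel, slice, frame) before storing the ROI
        kept.setPosition(channel, 1, t)
        clean_overlay.add(kept)
    }
    return clean_overlay
}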
--- a/omero_hyperstack_to_gastruloid_measurements.xml Fri Mar 24 13:03:39 2023 +0000 +++ b/omero_hyperstack_to_gastruloid_measurements.xml Wed Dec 20 20:07:13 2023 +0000 @@ -1,6 +1,49 @@ <tool id="omero_hyperstack_to_gastruloid_measurements" name="Omero hyperstack to Gastruloid measurements" profile="20.01" version="@TOOL_VERSION@+galaxy0"> <macros> - <token name="@TOOL_VERSION@">20230324</token> + <token name="@TOOL_VERSION@">20231220</token> + <xml name="segmentation"> + <conditional name="use_ilastik"> + <param name="segmentation_method" type="select" label="Segmentation method"> + <option value="convert_to_mask">Simply convert to mask</option> + <option value="ilastik">Use an ilastik project</option> + </param> + <when value="ilastik"> + <param name="ilastik_project" type="data" format="h5" label="Ilastik project" /> + <param name="ilastik_project_type" type="select" label="Type of Ilastik project"> + <option value="Regular">Regular</option> + <option value="Auto-context">Auto-context</option> + </param> + <param name="ilastik_label_OI" type="integer" value="3" label="Index of label of interest in Ilastik project" /> + <param name="probability_threshold" type="float" min="0" max="1" value="0.4" label="Probability threshold for ilastik" /> + <conditional name="background"> + <param name="get_background" type="select" label="Get ROI with background" help="Can be useful to normalize fluo"> + <option value="yes">Yes</option> + <option value="no">No</option> + </param> + <when value="yes"> + <param name="ilastik_label_BG" type="integer" value="1" label="Index of label of background in Ilastik project" /> + <param name="probability_threshold_BG" type="float" min="0" max="1" value="0.8" label="Probability threshold for background in ilastik" /> + </when> + <when value="no"> + <param name="ilastik_label_BG" type="hidden" value="0"/> + <param name="probability_threshold_BG" type="hidden" value="0"/> + </when> + </conditional> + </when> + <when value="convert_to_mask"> + <param name="ilastik_project_type" type="hidden" value="Regular" /> + <param name="ilastik_label_OI" type="hidden" value="3" /> + <param name="probability_threshold" type="hidden" value="0" /> + <section name="background" title="" > + <param name="ilastik_label_BG" type="hidden" value="0"/> + <param name="probability_threshold_BG" type="hidden" value="0"/> + </section> + </when> + </conditional> + <param name="radius_median" type="float" value="20" label="Radius for median (=smooth the mask)" /> + <param name="min_size_particle" type="integer" min="0" value="5000" label="Minimum surface for Analyze Particle" /> + <param name="keep_only_largest" type="boolean" truevalue="true" falsevalue="false" checked="true" label="Keep only one gastruloid per timepoint" /> + </xml> </macros> <requirements> <requirement type="package" version="20220414">fiji</requirement> @@ -20,16 +63,24 @@ echo "OMERO connection credentials are empty. 
Set your credentials via: User -> Preferences -> Manage Information" 1>&2 && exit 1 && #end if + #if str($mode.use_ilastik.segmentation_method) == "ilastik": + #set $ilastik_project_file = str($mode.use_ilastik.ilastik_project) + #set $ilastik_project_name = str($mode.use_ilastik.ilastik_project.name) + #else + #set $ilastik_project_file = "inexisting.ilp" + #set $ilastik_project_name = "inexisting.ilp" + #end if + mkdir output && ## Because ilastik wants to write to ${HOME}/.cache and ${HOME}/.config export HOME=`pwd` && ImageJ-ilastik --ij2 --headless --console --run '$__tool_directory__/'1-omero_timelapse_image_to_measurements_phase.groovy - 'USERNAME="",PASSWORD="",credentials="${credentials}",host="${omero_host}",port="${omero_port}",object_type="${omero_object.object_type}",id="${omero_object.omero_id}",ilastik_project="${ilastik_project}",ilastik_project_short_name="${ilastik_project.name}",ilastik_project_type="${ilastik_project_type}",ilastik_label_OI="${ilastik_label_OI}",probability_threshold="${probability_threshold}",radius_median="${radius_median}",min_size_particle="${min_size_particle}",get_spine="true",minimum_diameter="${minimum_diameter}",closeness_tolerance="${closeness_tolerance}",min_similarity="${min_similarity}",output_directory="output",debug="${debug}"' > output.log + 'USERNAME="",PASSWORD="",credentials="${credentials}",host="${omero_host}",port="${omero_port}",object_type="${omero_object.object_type}",id="${omero_object.omero_id}",segmentation_method="${mode.use_ilastik.segmentation_method}",use_existing="${mode.use_existing}",ilastik_project="$ilastik_project_file",ilastik_project_short_name="$ilastik_project_name",ilastik_project_type="${mode.use_ilastik.ilastik_project_type}",ilastik_label_OI="${mode.use_ilastik.ilastik_label_OI}",probability_threshold="${mode.use_ilastik.probability_threshold}",radius_median="${mode.radius_median}",min_size_particle="${mode.min_size_particle}",get_spine="true",minimum_diameter="${minimum_diameter}",closeness_tolerance="${closeness_tolerance}",min_similarity="${min_similarity}",output_directory="output",debug="${debug}",rescue="${mode.rescue}",ilastik_label_BG="${mode.use_ilastik.background.ilastik_label_BG}",probability_threshold_BG="${mode.use_ilastik.background.probability_threshold_BG}",keep_only_largest="${mode.keep_only_largest}"' > output.log ]]> </command> <configfiles> <configfile name="credentials"><![CDATA[#set $username = $__user__.extra_preferences.get('omero_account|username', "") - #set $password = $__user__.extra_preferences.get('omero_account|password', "") +#set $password = $__user__.extra_preferences.get('omero_account|password', "") $username $password ]]></configfile> @@ -60,15 +111,40 @@ <param name="omero_id" type="integer" value="" label="Dataset ID on omero" /> </when> </conditional> - <param name="ilastik_project" type="data" format="h5" label="Ilastik project" /> - <param name="ilastik_project_type" type="select" label="Type of Ilastik project"> - <option value="Regular">Regular</option> - <option value="Auto-context">Auto-context</option> - </param> - <param name="ilastik_label_OI" type="integer" value="3" label="Index of label of interest in Ilastik project" /> - <param name="probability_threshold" type="float" min="0" max="1" value="0.4" label="Probability threshold for ilastik" /> - <param name="radius_median" type="float" value="20" label="Radius for median (=smooth the mask)" /> - <param name="min_size_particle" type="integer" min="0" value="5000" label="Minimum surface for Analyze Particle" 
/> + <conditional name="mode"> + <param name="mode_select" type="select" label="Which mode do you want to use?" > + <option value="regular">Regular (from scratch)</option> + <option value="spine_only">Recompute only spine</option> + <option value="rescue">Rescue (if Regular did not go to the end)</option> + </param> + <when value="regular"> + <expand macro="segmentation"/> + <param name="rescue" type="hidden" value="false"/> + <param name="use_existing" type="hidden" value="false"/> + </when> + <when value="rescue"> + <expand macro="segmentation"/> + <param name="rescue" type="hidden" value="true"/> + <param name="use_existing" type="hidden" value="false"/> + </when> + <when value="spine_only"> + <section name="use_ilastik" title="" > + <param name="segmentation_method" type="hidden" value="convert_to_mask" /> + <param name="ilastik_project_type" type="hidden" value="Regular" /> + <param name="ilastik_label_OI" type="hidden" value="3" /> + <param name="probability_threshold" type="hidden" value="0" /> + <section name="background" title="" > + <param name="ilastik_label_BG" type="hidden" value="0"/> + <param name="probability_threshold_BG" type="hidden" value="0"/> + </section> + </section> + <param name="radius_median" type="hidden" value="20" /> + <param name="min_size_particle" type="hidden" value="5000" /> + <param name="keep_only_largest" type="hidden" value="true"/> + <param name="rescue" type="hidden" value="false"/> + <param name="use_existing" type="hidden" value="true"/> + </when> + </conditional> <param name="minimum_diameter" type="integer" min="0" value="20" label="Minimum diameter of inscribed circles" /> <param name="closeness_tolerance" type="integer" min="0" value="50" label="Closeness Tolerance for the spine" help="Maximum distance between circles along the spine"/> <param name="min_similarity" type="float" min="-1" max="1" value="0.1" label="Min similarity for the spine" help="Close to 0 values allow more U shapes while close to 1 values only allows I shapes" /> @@ -77,18 +153,18 @@ </inputs> <outputs> - <data name="logfile" format="txt" from_work_dir="output.log" label="${tool.name} on ID ${omero_object.omero_id} with ${ilastik_project.name}: logfile"> + <data name="logfile" format="txt" from_work_dir="output.log" label="${tool.name} on ${omero_object.object_type} ID ${omero_object.omero_id} with $getVar('mode.use_ilastik.ilastik_project.name', 'no ilastik'): logfile"> </data> - <collection name="tables" type="list" label="${tool.name} on ID ${omero_object.omero_id} with ${ilastik_project.name}: Tables"> + <collection name="tables" type="list" label="${tool.name} on ${omero_object.object_type} ID ${omero_object.omero_id} with $getVar('mode.use_ilastik.ilastik_project.name', 'no ilastik'): Tables"> <discover_datasets pattern="(?P<designation>.+)\.csv" directory="output" format="csv"/> <filter>keep_intermediate</filter> </collection> - <collection name="hyperstacks_with_overlay" type="list" label="${tool.name} on ID ${omero_object.omero_id} with ${ilastik_project.name}: Hyperstacks"> + <collection name="hyperstacks_with_overlay" type="list" label="${tool.name} on ${omero_object.object_type} ID ${omero_object.omero_id} with $getVar('mode.use_ilastik.ilastik_project.name', 'no ilastik'): Hyperstacks"> <discover_datasets pattern="(?P<designation>.+)\.tiff" directory="output" format="tiff"/> </collection> - <collection name="ilastik_results" type="list" label="${tool.name} on ID ${omero_object.omero_id} with ${ilastik_project.name}: Ilastik"> + <collection 
name="ilastik_results" type="list" label="${tool.name} on ${omero_object.object_type} ID ${omero_object.omero_id} with $getVar('mode.use_ilastik.ilastik_project.name', 'no ilastik'): Ilastik"> <discover_datasets pattern="(?P<designation>.+)\.tif$" directory="output" format="tiff"/> - <filter>keep_intermediate</filter> + <filter>keep_intermediate and mode['use_ilastik']['segmentation_method'] == "ilastik"</filter> </collection> </outputs> <help> @@ -106,12 +182,13 @@ // This macro was written by the BIOP (https://github.com/BIOP) // Romain Guiet and Rémy Dornier // Lucille Delisle modified to support headless + // And to be more robust to OMERO reboot // merge the analysis script with templates available at - // https://github.com/BIOP/OMERO-scripts/tree/main/Fiji + // https://github.com/BIOP/OMERO-scripts/tree/025047955b5c1265e1a93b259c1de4600d00f107/Fiji /* * = COPYRIGHT = - * © All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, BioImaging And Optics Platform (BIOP), 2022 + * © All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, BioImaging And Optics Platform (BIOP), 2023 * * Licensed under the BSD-3-Clause License: * Redistribution and use in source and binary forms, with or without modification, are permitted provided