changeset 1:9be687213bc9 draft

planemo upload for repository https://github.com/lldelisle/tools-lldelisle/tree/master/tools/max_projections_stack_and_upload_omero commit 39c8ae77d15d77ddcd8a0bb70f46d1be1531e9cc
author lldelisle
date Fri, 13 Dec 2024 07:58:28 +0000
parents a02156aa8bda
children 3fd95c753cff
files CHANGELOG.md max_projections_stack_and_upload_omero.xml stack_max_projs.groovy upload_omero.sh
diffstat 4 files changed, 63 insertions(+), 25 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/CHANGELOG.md	Fri Dec 13 07:58:28 2024 +0000
@@ -0,0 +1,11 @@
+# CHANGELOG
+
+## 20241213
+
+- Remove unused input plateName in wrapper.
+- Do not break when one image has an inconsistent number of slices; fall back to the Greys channel instead.
+- Fix the omero command used to create projects/datasets.
+
+## 20241212
+
+First release
\ No newline at end of file
--- a/max_projections_stack_and_upload_omero.xml	Thu Dec 12 12:42:20 2024 +0000
+++ b/max_projections_stack_and_upload_omero.xml	Fri Dec 13 07:58:28 2024 +0000
@@ -1,7 +1,7 @@
 <tool id="max_projections_stack_and_upload_omero" name="Stack MaxProj" version="@TOOL_VERSION@+galaxy@VERSION_SUFFIX@" profile="20.01" license="BSD-3">
     <description>And upload to omero</description>
     <macros>
-        <token name="@TOOL_VERSION@">20241212</token>
+        <token name="@TOOL_VERSION@">20241213</token>
         <token name="@VERSION_SUFFIX@">0</token>
     </macros>
     <requirements>
@@ -64,7 +64,6 @@
                     <validator type="regex" message="Enter a valid host location, for example, your.omero.server">^[a-zA-Z0-9._-]*$</validator>
                     <validator type="expression" message="No two dots (..) allowed">'..' not in value</validator>
                 </param>
-                <param name="plateName" type="text" value="Experiment:0" label="Name of the plate (on omero)" />
                 <conditional name="cond_create">
                     <param name="to_create" type="select" label="Create the project/dataset on OMERO or use existing one?">
                         <option value="both">Create a new Project and a new Dataset</option>
--- a/stack_max_projs.groovy	Thu Dec 12 12:42:20 2024 +0000
+++ b/stack_max_projs.groovy	Fri Dec 13 07:58:28 2024 +0000
@@ -29,7 +29,7 @@
  */
 
 // Version number = date of last modif
-VERSION = "20241212"
+VERSION = "20241213"
 
 /**
  * *****************************************************************************************************************
@@ -97,9 +97,9 @@
 							" filter=" + fluo_pattern_list[i]
 						)
 					)
+					// println samplesMap.get(unique_identifier).get(fluo_channels_list[i]).getDimensions()
 					if (!GraphicsEnvironment.isHeadless()){
-						samplesMap.get(unique_identifier).get(
-						fluo_channels_list[i]).show()
+						samplesMap.get(unique_identifier).get(fluo_channels_list[i]).show()
 					}
 				}
 			}
@@ -107,9 +107,9 @@
 			// It is easy as all images are used
 			println "Processing " + unique_identifier + " Greys"
 			samplesMap.get(unique_identifier).put(final_color, FolderOpener.open(current_directory.getAbsolutePath()))
+			// println samplesMap.get(unique_identifier).get(final_color).getDimensions()
 			if (!GraphicsEnvironment.isHeadless()){
-				samplesMap.get(unique_identifier).get(
-				final_color).show()
+				samplesMap.get(unique_identifier).get(final_color).show()
 			}
 		}
 	}
@@ -120,27 +120,55 @@
 		Map<String, ImagePlus> channelsMap = samplesMap.get(unique_identifier)
 		ArrayList<String> channels = []
 		ArrayList<ImagePlus> current_images = []
+		int ref_nT = 0
+		boolean all_compatibles = true
 
 		for(String channel : channelsMap.keySet()){
 			channels.add(channel)
 			current_images.add(channelsMap.get(channel))
+			if (ref_nT == 0) {
+				ref_nT = channelsMap.get(channel).nSlices
+			} else {
+				if (ref_nT != channelsMap.get(channel).nSlices) {
+					all_compatibles = false
+				}
+			}
 		}
-		// Get number of time:
-		int nT = current_images[0].nSlices
-
-		// Merge all
-		ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[])
-		// Re-order to make a multi-channel, time-lapse image
-		ImagePlus final_imp
-		if (channels.size() == 1 && nT == 1) {
-			final_imp = merged_imps
+		// Declared here so final_imp stays in scope after the if/else below
+		ImagePlus final_imp
+		if (all_compatibles) {
+			// Merge all
+			ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[])
+			// Re-order to make a multi-channel, time-lapse image
+			if (channels.size() == 1 && ref_nT == 1) {
+				final_imp = merged_imps
+			} else {
+				try {
+					final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, ref_nT, "xytcz", "Color")
+					// set LUTs
+					(0..channels.size()-1).each{
+						final_imp.setC(it + 1)
+						IJ.run(final_imp, channels[it], "")
+						final_imp.resetDisplayRange()
+					}
+				} catch(Exception e) {
+					println "Could not create the hyperstack for " + unique_identifier + ": " + e
+					continue
+				}
+			}
 		} else {
-			final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, nT, "xytcz", "Color")
-		}
-		// set LUTs
-		(0..channels.size()-1).each{
-			final_imp.setC(it + 1)
-			IJ.run(final_imp, channels[it], "")
+			println "Not all channels have the same number of slices:"
+			(0..channels.size()-1).each{
+				println "Channel " + channels[it] + " has " + current_images[it].getDimensions() + " whCZT."
+			}
+			if (channelsMap.containsKey("Greys")) {
+				println "Will keep only Greys channel"
+				final_imp = channelsMap.get("Greys")
+			} else {
+				println "Will keep only " + channels[0] + " channel"
+				final_imp = current_images[0]
+				IJ.run(final_imp, channels[0], "")
+			}
 			final_imp.resetDisplayRange()
 		}
 		// Save to tiff
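
The new per-sample logic above boils down to: concatenate the channel stacks into a multi-channel, time-lapse hyperstack only when they all have the same number of slices; otherwise keep a single channel, preferring Greys. A minimal standalone sketch of that decision, using the same ImageJ classes the script already imports (the helper name and variables are illustrative, not part of the tool):

import ij.IJ
import ij.ImagePlus
import ij.plugin.Concatenator
import ij.plugin.HyperStackConverter

// Hypothetical helper mirroring the logic added above: build one hyperstack per
// sample when every channel stack agrees on slice count, otherwise fall back to
// a single channel (preferring "Greys").
ImagePlus buildFinalImage(Map<String, ImagePlus> channelsMap) {
	List<String> channels = new ArrayList<String>(channelsMap.keySet())
	List<ImagePlus> images = channels.collect { channelsMap.get(it) }
	int refNT = images[0].nSlices
	boolean allCompatible = images.every { it.nSlices == refNT }

	if (!allCompatible) {
		// Keep a single channel so the sample is not lost entirely
		String kept = channelsMap.containsKey("Greys") ? "Greys" : channels[0]
		ImagePlus single = channelsMap.get(kept)
		single.resetDisplayRange()
		return single
	}
	// Concatenate all single-channel time series, then reshape into a C x T hyperstack
	ImagePlus merged = Concatenator.run(images as ImagePlus[])
	if (channels.size() == 1 && refNT == 1) {
		return merged
	}
	// "xytcz": in the concatenated stack, time varies fastest, then channel (z is 1)
	ImagePlus hyper = HyperStackConverter.toHyperStack(merged, channels.size(), 1, refNT, "xytcz", "Color")
	channels.eachWithIndex { String lutName, int i ->
		hyper.setC(i + 1)
		IJ.run(hyper, lutName, "")   // channel keys double as ImageJ LUT names, e.g. "Greys"
		hyper.resetDisplayRange()
	}
	return hyper
}
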
--- a/upload_omero.sh	Thu Dec 12 12:42:20 2024 +0000
+++ b/upload_omero.sh	Fri Dec 13 07:58:28 2024 +0000
@@ -8,13 +8,13 @@
 
 if [ "$to_create" = "both" ]; then
     # Create a project:
-    project_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}')
+    project_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}')
     echo "Just created the new project ${project_name_or_id}"
 fi
 if [ "$to_create" = "both" ] || [ "$to_create" = "dataset" ]; then
-    dataset_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}')
+    dataset_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}')
     echo "Just created the new dataset ${dataset_name_or_id}"
-    ${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id}
+    omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id}
 fi
 echo "Start upload"
 omero import -s ${omero_server} -u ${omero_user} -w ${omero_password} --depth 1 -T Dataset:id:"${dataset_name_or_id}" output 2>&1
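
As a note on the ID handling above: omero obj new Project (or Dataset) prints an identifier of the form Project:123, and the awk -F ":" 'END{print $NF}' pipe keeps only the trailing number so it can be reused by the link and import commands. A standalone sketch of the same flow with placeholder credentials (server, user, password and object names are illustrative):

#!/usr/bin/env bash
# Placeholder values; the Galaxy wrapper supplies these as variables.
omero_server="your.omero.server"
omero_user="alice"
omero_password="secret"

# Create a Project; "omero obj new" prints e.g. "Project:123", keep only the id.
project_id=$(omero obj -s "${omero_server}" -u "${omero_user}" -w "${omero_password}" \
    new Project name="My project" | awk -F ":" 'END{print $NF}')

# Create a Dataset the same way and link it to the Project.
dataset_id=$(omero obj -s "${omero_server}" -u "${omero_user}" -w "${omero_password}" \
    new Dataset name="My dataset" | awk -F ":" 'END{print $NF}')
omero obj -s "${omero_server}" -u "${omero_user}" -w "${omero_password}" \
    new ProjectDatasetLink parent=Project:${project_id} child=Dataset:${dataset_id}

# Import every image directly under ./output into that Dataset.
omero import -s "${omero_server}" -u "${omero_user}" -w "${omero_password}" \
    --depth 1 -T Dataset:id:"${dataset_id}" output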