changeset 5:4291d37da555 draft

planemo upload for repository https://github.com/Helmholtz-UFZ/galaxy-tools/tree/main/tools/omero commit 636cbb62d59819caca5bc9eab0a8ec31be5bdd46
author ufz
date Mon, 16 Dec 2024 20:56:56 +0000
parents 351c6b43e16c
children 6d8603230e85
files omero_dataset_to_plate.py omero_metadata_import.xml omero_metadata_upload.py test-data/dataset_conversion_log.txt test-data/mapping.tsv test-data/omero_output.txt test-data/output_KV_import.txt test-data/output_filter_filename.tsv
diffstat 8 files changed, 147 insertions(+), 8 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_dataset_to_plate.py	Mon Dec 16 20:56:56 2024 +0000
@@ -0,0 +1,127 @@
+import argparse
+import csv
+import json
+import re
+import sys
+from collections import defaultdict
+
+
+import omero
+from omero.gateway import BlitzGateway
+from omero.rtypes import rint, rstring
+
+
+def convert_dataset_to_plate(host, user, pws, port, dataset_id, log_file, mapping_file, delete_dataset):
+    """
+    Connect to OMERO server, convert a dataset to a plate using the specified well mapping file
+    """
+    conn = BlitzGateway(user, pws, host=host, port=port, secure=True)
+    if not conn.connect():
+        sys.exit("ERROR: Failed to connect to OMERO server")
+
+    def log_message(message, status="INFO"):
+        with open(log_file, 'w') as f:
+            f.write(f"{message}")
+
+    dataset = conn.getObject("Dataset", dataset_id)
+    if dataset is None:
+        conn.close()
+        sys.exit("ERROR: Dataset not found")
+
+    update_service = conn.getUpdateService()
+
+    # Create a Plate
+    plate = omero.model.PlateI()
+    plate.name = rstring(dataset.getName())
+    plate = update_service.saveAndReturnObject(plate)
+
+    # Parse the mapping file
+    image_to_well_mapping = {}
+    if mapping_file:
+        with open(mapping_file, 'r') as f:
+            reader = csv.DictReader(f, delimiter='\t')
+            for row in reader:
+                filename = row['Filename']
+                well = row['Well']
+                match = re.match(r"([A-Z])(\d+)", well)
+                if match:
+                    row_char, col = match.groups()
+                    row = ord(row_char.upper()) - ord('A')
+                    col = int(col) - 1
+                    image_to_well_mapping[filename] = (row, col)
+                else:
+                    conn.close()
+                    sys.exit(f"Invalid well format '{well}' for file '{filename}'")
+
+    # List the dataset children
+    images = list(dataset.listChildren())
+    if not images:
+        conn.close()
+        sys.exit("ERROR: No images found in dataset")
+
+    # Compare images in the mapping file and in the dataset
+    grouped_images = defaultdict(list)
+    for image in images:
+        image_name = image.getName()
+        if image_to_well_mapping:
+            if image_name in image_to_well_mapping:
+                row, col = image_to_well_mapping[image_name]
+                grouped_images[(row, col)].append(image)
+            else:
+                conn.close()
+                sys.exit(f"Image '{image_name}' not found in mapping file.")
+
+    # Assign images to the well based on the mapping file
+    for (row, col), imgs_in_group in grouped_images.items():
+        well = omero.model.WellI()
+        well.plate = omero.model.PlateI(plate.id.val, False)
+        well.column = rint(col)
+        well.row = rint(row)
+
+        for image in imgs_in_group:
+            ws = omero.model.WellSampleI()
+            ws.image = omero.model.ImageI(image.id, False)
+            ws.well = well
+            well.addWellSample(ws)
+
+        try:
+            update_service.saveObject(well)
+        except ValueError as e:
+            conn.close()
+            sys.exit("ERROR: Failed to update plate for dataset '{}' due to: {}".format(dataset.getName(), str(e)))
+
+    # Close the connection and, in case, delete the dataset
+    if delete_dataset is True:
+        conn.deleteObjects("Dataset", [dataset_id], wait=True)
+    log_message(f"Images from Dataset {dataset_id} successfully added to Plate {plate.id.val}")
+    conn.close()
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Convert an OMERO dataset to a plate.")
+    parser.add_argument("--credential-file", dest="credential_file", type=str, required=True,
+                        help="Credential file (JSON file with username and password for OMERO)")
+    parser.add_argument('--host', required=True, help='OMERO host')
+    parser.add_argument('--port', required=True, type=int, help='OMERO port')
+    parser.add_argument('--dataset_id', type=int, required=True, help="Dataset ID to convert plate")
+    parser.add_argument('--log_file', default='metadata_import_log.txt',
+                        help='Path to the log file')
+    parser.add_argument('--mapping_file',
+                        help='Tabular file mapping filenames to well positions (2 columns: filename, Well)')
+    parser.add_argument('--delete_dataset', action='store_true',
+                        help='Flag to delete the original dataset')
+    args = parser.parse_args()
+
+    with open(args.credential_file, 'r') as f:
+        crds = json.load(f)
+
+    convert_dataset_to_plate(
+        user=crds['username'],
+        pws=crds['password'],
+        host=args.host,
+        port=args.port,
+        dataset_id=args.dataset_id,
+        log_file=args.log_file,
+        mapping_file=args.mapping_file,
+        delete_dataset=args.delete_dataset
+    )
--- a/omero_metadata_import.xml	Tue Oct 29 06:59:10 2024 +0000
+++ b/omero_metadata_import.xml	Mon Dec 16 20:56:56 2024 +0000
@@ -30,16 +30,18 @@
             <option value="project">Project</option>
             <option value="screen">Screen</option>
             <option value="dataset">Dataset</option>
+            <option value="plate">Plate</option>
+            <option value="well">Well</option>
             <option value="image">Image</option>
         </param>
         <conditional name="object_id">
-        <param name="object_id_selection" type="select" label="Selection"  help="Create a new OMERO object or target an existing one">
+        <param name="object_id_selection" type="select" label="Selection" help="Create a new OMERO object or target an existing one">
             <option value="new_object">Create new object</option>
             <option value="existing_object">Target an existing object</option>
         </param>
         <when value="new_object"/>
         <when value="existing_object">
-            <param name="did" type="integer" value="" optional="false" label="Object ID"/>
+            <param name="did" type="integer" min="1" optional="false" label="Object ID"/>
         </when>
         </conditional>
         <param argument="ann_type" type="select" optional="false" label="Annotation type" help="Select annotation format">
--- a/omero_metadata_upload.py	Tue Oct 29 06:59:10 2024 +0000
+++ b/omero_metadata_upload.py	Mon Dec 16 20:56:56 2024 +0000
@@ -56,6 +56,10 @@
                 if did is None:
                     did = ez.post_dataset(conn, dataset_name=str(datetime.now()))
                 result = upload_metadata(conn, "Dataset", did, data_dict, df, ann_type, an_name)
+            elif obj_type == "plate":
+                result = upload_metadata(conn, "Plate", did, data_dict, df, ann_type, an_name)
+            elif obj_type == "well":
+                result = upload_metadata(conn, "Well", did, data_dict, df, ann_type, an_name)
             elif obj_type == "image":
                 result = upload_metadata(conn, "Image", did, data_dict, df, ann_type, an_name)
             else:
@@ -74,10 +78,12 @@
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description='Import metadata into OMERO.')
-    parser.add_argument("--credential-file", dest="credential_file", type=str, required=True, help="Credential file (JSON file with username and password for OMERO)")
+    parser.add_argument("--credential-file", dest="credential_file", type=str, required=True,
+                        help="Credential file (JSON file with username and password for OMERO)")
     parser.add_argument('--host', required=True, help='OMERO host')
     parser.add_argument('--port', required=True, type=int, help='OMERO port')
-    parser.add_argument('--obj_type', required=True, choices=['project', 'screen', 'dataset', 'image'],
+    parser.add_argument('--obj_type', required=True, choices=['project', 'screen', 'dataset', 'plate',
+                                                              'well', 'image'],
                         help='Type of OMERO object')
     parser.add_argument('--did', type=int, help='ID of the object (if it exists)')
     parser.add_argument('--ann_type', required=True, choices=['table', 'KV'], help='Annotation type')
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/dataset_conversion_log.txt	Mon Dec 16 20:56:56 2024 +0000
@@ -0,0 +1,1 @@
+Images from Dataset 2 successfully added to Plate 1
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mapping.tsv	Mon Dec 16 20:56:56 2024 +0000
@@ -0,0 +1,3 @@
+Filename	Well
+sample_A03_image.jpg	A2
+sample_H11_image.jpg	H5
--- a/test-data/omero_output.txt	Tue Oct 29 06:59:10 2024 +0000
+++ b/test-data/omero_output.txt	Mon Dec 16 20:56:56 2024 +0000
@@ -1,2 +1,2 @@
-Image:3
-Image:4
+Image:5
+Image:6
--- a/test-data/output_KV_import.txt	Tue Oct 29 06:59:10 2024 +0000
+++ b/test-data/output_KV_import.txt	Mon Dec 16 20:56:56 2024 +0000
@@ -1,1 +1,1 @@
-SUCCESS: Successfully uploaded metadata for dataset with ID 3. Result: {'Key1': 'Value1', 'Key2': 'Value2'}
+SUCCESS: Successfully uploaded metadata for dataset with ID 4. Result: {'Key1': 'Value1', 'Key2': 'Value2'}
--- a/test-data/output_filter_filename.tsv	Tue Oct 29 06:59:10 2024 +0000
+++ b/test-data/output_filter_filename.tsv	Mon Dec 16 20:56:56 2024 +0000
@@ -1,1 +1,1 @@
-2
+1