# HG changeset patch
# User ufz
# Date 1734382626 0
# Node ID f86ba532846655952556c8d7ef1e847d0bf8a311
# Parent 375281d11535f7e1bddd7de550bc565e8821d19c
planemo upload for repository https://github.com/Helmholtz-UFZ/galaxy-tools/tree/main/tools/omero commit 636cbb62d59819caca5bc9eab0a8ec31be5bdd46
diff -r 375281d11535 -r f86ba5328466 omero_dataset_to_plate.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_dataset_to_plate.py Mon Dec 16 20:57:06 2024 +0000
@@ -0,0 +1,137 @@
+import argparse
+import csv
+import json
+import re
+import sys
+from collections import defaultdict
+
+
+import omero
+from omero.gateway import BlitzGateway
+from omero.rtypes import rint, rstring
+
+
+def convert_dataset_to_plate(host, user, pws, port, dataset_id, log_file, mapping_file, delete_dataset):
+ """
+ Connect to OMERO server, convert a dataset to a plate using the specified well mapping file
+ """
+ conn = BlitzGateway(user, pws, host=host, port=port, secure=True)
+ if not conn.connect():
+ sys.exit("ERROR: Failed to connect to OMERO server")
+
+    def log_message(message):
+        with open(log_file, 'w') as f:
+            f.write(message)
+
+ dataset = conn.getObject("Dataset", dataset_id)
+ if dataset is None:
+ conn.close()
+ sys.exit("ERROR: Dataset not found")
+
+ update_service = conn.getUpdateService()
+
+ # Create a Plate
+ plate = omero.model.PlateI()
+ plate.name = rstring(dataset.getName())
+ plate = update_service.saveAndReturnObject(plate)
+
+ # Parse the mapping file
+ image_to_well_mapping = {}
+ if mapping_file:
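+        # Tab-separated columns: Filename and Well (e.g. sample_A03_image.jpg -> A2, see test-data/mapping.tsv)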
+ with open(mapping_file, 'r') as f:
+ reader = csv.DictReader(f, delimiter='\t')
+ for row in reader:
+ filename = row['Filename']
+ well = row['Well']
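+                # Convert well labels such as "B3" into zero-based row/column indices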
+                match = re.match(r"([A-Za-z])(\d+)", well)
+ if match:
+ row_char, col = match.groups()
+ row = ord(row_char.upper()) - ord('A')
+ col = int(col) - 1
+ image_to_well_mapping[filename] = (row, col)
+ else:
+ conn.close()
+ sys.exit(f"Invalid well format '{well}' for file '{filename}'")
+
+ # List the dataset children
+ images = list(dataset.listChildren())
+ if not images:
+ conn.close()
+ sys.exit("ERROR: No images found in dataset")
+
+    # Validate each dataset image against the mapping file and group images by well
+ grouped_images = defaultdict(list)
+ for image in images:
+ image_name = image.getName()
+ if image_to_well_mapping:
+ if image_name in image_to_well_mapping:
+ row, col = image_to_well_mapping[image_name]
+ grouped_images[(row, col)].append(image)
+ else:
+ conn.close()
+ sys.exit(f"Image '{image_name}' not found in mapping file.")
+
+    # Assign images to wells based on the mapping file
+ for (row, col), imgs_in_group in grouped_images.items():
+ well = omero.model.WellI()
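+        # Unloaded proxy: link the well to the plate by ID without re-saving the plate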
+ well.plate = omero.model.PlateI(plate.id.val, False)
+ well.column = rint(col)
+ well.row = rint(row)
+
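+        # Attach each image to the well through a WellSample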
+ for image in imgs_in_group:
+ ws = omero.model.WellSampleI()
+ ws.image = omero.model.ImageI(image.id, False)
+ ws.well = well
+ well.addWellSample(ws)
+
+ try:
+ update_service.saveObject(well)
+        except omero.ServerError as e:
+            conn.close()
+            sys.exit(f"ERROR: Failed to update plate for dataset '{dataset.getName()}' due to: {e}")
+
+    # Optionally delete the source dataset, then log the result and close the connection
+    if delete_dataset:
+ conn.deleteObjects("Dataset", [dataset_id], wait=True)
+ log_message(f"Images from Dataset {dataset_id} successfully added to Plate {plate.id.val}")
+ conn.close()
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Convert an OMERO dataset to a plate.")
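+    # Example invocation (hypothetical host and IDs; 4064 is the default OMERO port):
+    #   python omero_dataset_to_plate.py --credential-file creds.json \
+    #       --host omero.example.org --port 4064 --dataset_id 2 \
+    #       --mapping_file mapping.tsv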
+ parser.add_argument("--credential-file", dest="credential_file", type=str, required=True,
+ help="Credential file (JSON file with username and password for OMERO)")
+ parser.add_argument('--host', required=True, help='OMERO host')
+ parser.add_argument('--port', required=True, type=int, help='OMERO port')
+    parser.add_argument('--dataset_id', type=int, required=True, help="ID of the dataset to convert to a plate")
+ parser.add_argument('--log_file', default='metadata_import_log.txt',
+ help='Path to the log file')
+ parser.add_argument('--mapping_file',
+                        help='Tabular file mapping filenames to well positions (2 columns: Filename, Well)')
+ parser.add_argument('--delete_dataset', action='store_true',
+ help='Flag to delete the original dataset')
+ args = parser.parse_args()
+
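+    # The credential file is a JSON object with "username" and "password" keys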
+ with open(args.credential_file, 'r') as f:
+ crds = json.load(f)
+
+ convert_dataset_to_plate(
+ user=crds['username'],
+ pws=crds['password'],
+ host=args.host,
+ port=args.port,
+ dataset_id=args.dataset_id,
+ log_file=args.log_file,
+ mapping_file=args.mapping_file,
+ delete_dataset=args.delete_dataset
+ )
diff -r 375281d11535 -r f86ba5328466 omero_filter.xml
--- a/omero_filter.xml Tue Oct 29 06:59:59 2024 +0000
+++ b/omero_filter.xml Mon Dec 16 20:57:06 2024 +0000
@@ -77,7 +77,7 @@
@@ -114,4 +114,4 @@
10.1038/nmeth.1896
-
\ No newline at end of file
+
diff -r 375281d11535 -r f86ba5328466 omero_metadata_upload.py
--- a/omero_metadata_upload.py Tue Oct 29 06:59:59 2024 +0000
+++ b/omero_metadata_upload.py Mon Dec 16 20:57:06 2024 +0000
@@ -56,6 +56,11 @@
if did is None:
did = ez.post_dataset(conn, dataset_name=str(datetime.now()))
result = upload_metadata(conn, "Dataset", did, data_dict, df, ann_type, an_name)
+ elif obj_type == "plate":
+ result = upload_metadata(conn, "Plate", did, data_dict, df, ann_type, an_name)
+ elif obj_type == "well":
+ result = upload_metadata(conn, "Well", did, data_dict, df, ann_type, an_name)
elif obj_type == "image":
result = upload_metadata(conn, "Image", did, data_dict, df, ann_type, an_name)
else:
@@ -74,10 +78,12 @@
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Import metadata into OMERO.')
- parser.add_argument("--credential-file", dest="credential_file", type=str, required=True, help="Credential file (JSON file with username and password for OMERO)")
+ parser.add_argument("--credential-file", dest="credential_file", type=str, required=True,
+ help="Credential file (JSON file with username and password for OMERO)")
parser.add_argument('--host', required=True, help='OMERO host')
parser.add_argument('--port', required=True, type=int, help='OMERO port')
- parser.add_argument('--obj_type', required=True, choices=['project', 'screen', 'dataset', 'image'],
+ parser.add_argument('--obj_type', required=True, choices=['project', 'screen', 'dataset', 'plate',
+                                                              'well', 'image'],
help='Type of OMERO object')
parser.add_argument('--did', type=int, help='ID of the object (if it exists)')
parser.add_argument('--ann_type', required=True, choices=['table', 'KV'], help='Annotation type')
diff -r 375281d11535 -r f86ba5328466 test-data/dataset_conversion_log.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/dataset_conversion_log.txt Mon Dec 16 20:57:06 2024 +0000
@@ -0,0 +1,1 @@
+Images from Dataset 2 successfully added to Plate 1
\ No newline at end of file
diff -r 375281d11535 -r f86ba5328466 test-data/mapping.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/mapping.tsv Mon Dec 16 20:57:06 2024 +0000
@@ -0,0 +1,3 @@
+Filename	Well
+sample_A03_image.jpg	A2
+sample_H11_image.jpg	H5
diff -r 375281d11535 -r f86ba5328466 test-data/omero_output.txt
--- a/test-data/omero_output.txt Tue Oct 29 06:59:59 2024 +0000
+++ b/test-data/omero_output.txt Mon Dec 16 20:57:06 2024 +0000
@@ -1,2 +1,2 @@
-Image:3
-Image:4
+Image:5
+Image:6
diff -r 375281d11535 -r f86ba5328466 test-data/output_KV_import.txt
--- a/test-data/output_KV_import.txt Tue Oct 29 06:59:59 2024 +0000
+++ b/test-data/output_KV_import.txt Mon Dec 16 20:57:06 2024 +0000
@@ -1,1 +1,1 @@
-SUCCESS: Successfully uploaded metadata for dataset with ID 3. Result: {'Key1': 'Value1', 'Key2': 'Value2'}
+SUCCESS: Successfully uploaded metadata for dataset with ID 4. Result: {'Key1': 'Value1', 'Key2': 'Value2'}
diff -r 375281d11535 -r f86ba5328466 test-data/output_filter_filename.tsv
--- a/test-data/output_filter_filename.tsv Tue Oct 29 06:59:59 2024 +0000
+++ b/test-data/output_filter_filename.tsv Mon Dec 16 20:57:06 2024 +0000
@@ -1,1 +1,1 @@
-2
+1