ufz/omero_dataset_to_plate: omero_dataset_to_plate.py @ 0:5ad32d18fe82 (draft)
planemo upload for repository https://github.com/Helmholtz-UFZ/galaxy-tools/tree/main/tools/omero commit 636cbb62d59819caca5bc9eab0a8ec31be5bdd46
| author | ufz |
|---|---|
| date | Mon, 16 Dec 2024 20:56:16 +0000 |
| parents | |
| children | |
comparison: -1:000000000000 → 0:5ad32d18fe82
```python
import argparse
import csv
import json
import re
import sys
from collections import defaultdict

import omero
from omero.gateway import BlitzGateway
from omero.rtypes import rint, rstring


def convert_dataset_to_plate(host, user, pws, port, dataset_id, log_file, mapping_file, delete_dataset):
    """
    Connect to OMERO server, convert a dataset to a plate using the specified well mapping file
    """
    conn = BlitzGateway(user, pws, host=host, port=port, secure=True)
    if not conn.connect():
        sys.exit("ERROR: Failed to connect to OMERO server")

    def log_message(message, status="INFO"):
        # Write a single status line to the log file (overwrites any previous content)
        with open(log_file, 'w') as f:
            f.write(f"{status}: {message}\n")

    dataset = conn.getObject("Dataset", dataset_id)
    if dataset is None:
        conn.close()
        sys.exit("ERROR: Dataset not found")

    update_service = conn.getUpdateService()

    # Create a Plate
    plate = omero.model.PlateI()
    plate.name = rstring(dataset.getName())
    plate = update_service.saveAndReturnObject(plate)

    # Parse the mapping file
    image_to_well_mapping = {}
    if mapping_file:
        with open(mapping_file, 'r') as f:
            reader = csv.DictReader(f, delimiter='\t')
            for row in reader:
                filename = row['Filename']
                well = row['Well']
                match = re.match(r"([A-Z])(\d+)", well)
                if match:
                    row_char, col = match.groups()
                    row = ord(row_char.upper()) - ord('A')
                    col = int(col) - 1
                    image_to_well_mapping[filename] = (row, col)
                else:
                    conn.close()
                    sys.exit(f"Invalid well format '{well}' for file '{filename}'")

    # List the dataset children
    images = list(dataset.listChildren())
    if not images:
        conn.close()
        sys.exit("ERROR: No images found in dataset")

    # Group images by well position; fail if a dataset image is missing from the mapping file
    grouped_images = defaultdict(list)
    for image in images:
        image_name = image.getName()
        if image_to_well_mapping:
            if image_name in image_to_well_mapping:
                row, col = image_to_well_mapping[image_name]
                grouped_images[(row, col)].append(image)
            else:
                conn.close()
                sys.exit(f"Image '{image_name}' not found in mapping file.")

    # Assign images to wells based on the mapping file
    for (row, col), imgs_in_group in grouped_images.items():
        well = omero.model.WellI()
        well.plate = omero.model.PlateI(plate.id.val, False)
        well.column = rint(col)
        well.row = rint(row)

        for image in imgs_in_group:
            ws = omero.model.WellSampleI()
            ws.image = omero.model.ImageI(image.id, False)
            ws.well = well
            well.addWellSample(ws)

        try:
            update_service.saveObject(well)
        except Exception as e:
            conn.close()
            sys.exit("ERROR: Failed to update plate for dataset '{}' due to: {}".format(dataset.getName(), str(e)))

    # Delete the original dataset if requested, then log the result and close the connection
    if delete_dataset:
        conn.deleteObjects("Dataset", [dataset_id], wait=True)
    log_message(f"Images from Dataset {dataset_id} successfully added to Plate {plate.id.val}")
    conn.close()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Convert an OMERO dataset to a plate.")
    parser.add_argument("--credential-file", dest="credential_file", type=str, required=True,
                        help="Credential file (JSON file with username and password for OMERO)")
    parser.add_argument('--host', required=True, help='OMERO host')
    parser.add_argument('--port', required=True, type=int, help='OMERO port')
    parser.add_argument('--dataset_id', type=int, required=True,
                        help='ID of the dataset to convert to a plate')
    parser.add_argument('--log_file', default='metadata_import_log.txt',
                        help='Path to the log file')
    parser.add_argument('--mapping_file',
                        help='Tabular file mapping filenames to well positions (2 columns: Filename, Well)')
    parser.add_argument('--delete_dataset', action='store_true',
                        help='Flag to delete the original dataset')
    args = parser.parse_args()

    with open(args.credential_file, 'r') as f:
        crds = json.load(f)

    convert_dataset_to_plate(
        user=crds['username'],
        pws=crds['password'],
        host=args.host,
        port=args.port,
        dataset_id=args.dataset_id,
        log_file=args.log_file,
        mapping_file=args.mapping_file,
        delete_dataset=args.delete_dataset
    )
```
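The script expects two small input files whose layouts are only implied by the parsing code above: a JSON credential file with `username` and `password` keys, and a tab-separated mapping file with `Filename` and `Well` columns where wells are written like `A1` or `B12`. The sketch below, with made-up paths and values, generates matching examples; it only illustrates the formats the parser accepts and is not part of the tool.

```python
import csv
import json

# Hypothetical paths; any writable location works.
credential_path = "omero_credentials.json"
mapping_path = "well_mapping.tsv"

# JSON credential file: json.load() in the script expects 'username' and 'password' keys.
with open(credential_path, "w") as f:
    json.dump({"username": "alice", "password": "secret"}, f)

# Tab-separated mapping file: csv.DictReader(delimiter='\t') expects 'Filename' and 'Well'
# headers, and each well must match the regex ([A-Z])(\d+), e.g. A1 or B12.
with open(mapping_path, "w", newline="") as f:
    writer = csv.DictWriter(f, fieldnames=["Filename", "Well"], delimiter="\t")
    writer.writeheader()
    writer.writerow({"Filename": "plate1_A1.tiff", "Well": "A1"})
    writer.writerow({"Filename": "plate1_B2.tiff", "Well": "B2"})
```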

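For reference, the conversion can also be exercised without the argparse wrapper by calling the function directly, e.g. from a test script. This is a sketch only: it assumes omero-py is installed, an OMERO server is reachable, and the script is importable under its file name; the host, credentials, and dataset ID are placeholders.

```python
# Assumption: the script above is importable as a module named omero_dataset_to_plate.
from omero_dataset_to_plate import convert_dataset_to_plate

# Equivalent CLI call (the tool's normal entry point):
#   python omero_dataset_to_plate.py --credential-file omero_credentials.json \
#       --host omero.example.org --port 4064 --dataset_id 101 \
#       --mapping_file well_mapping.tsv --log_file metadata_import_log.txt
convert_dataset_to_plate(
    host="omero.example.org",   # placeholder server address
    user="alice",               # placeholder credentials
    pws="secret",
    port=4064,                  # default OMERO port
    dataset_id=101,             # placeholder dataset ID
    log_file="metadata_import_log.txt",
    mapping_file="well_mapping.tsv",
    delete_dataset=False,
)
```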