changeset 0:8a3324018fc5 draft

planemo upload for repository https://github.com/Helmholtz-UFZ/galaxy-tools/tree/main/tools/omero commit 19d84fd5a372f1428e3e5670144881a56e8af8b2
author ufz
date Tue, 22 Oct 2024 11:54:08 +0000
parents
children 375281d11535
files README.md macros.xml omero_filter.py omero_filter.xml omero_get_id.py omero_get_value.py omero_metadata_upload.py omero_roi_upload.py test-data/input1.tif test-data/input2.tif test-data/input_roi.tsv test-data/input_roi_minimal.tsv test-data/metadata.tsv test-data/omero_output.txt test-data/output_KV_import.txt test-data/output_filter_filename.tsv test-data/output_filter_tag.tsv test-data/output_ids_dataset.tsv test-data/output_ids_image.tsv test-data/output_ids_project.tsv test-data/output_table_import.txt test-data/output_table_roi.txt test-data/output_table_roi_minimal.txt test-data/output_target_import.txt
diffstat 24 files changed, 783 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/README.md	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,19 @@
+# OMERO import images
+
+## Set up user credentials on Galaxy to connect to another OMERO instance
+
+To enable users to set their credentials for this tool,
+make sure the file `config/user_preferences_extra.yml` has the following section:
+
+```
+    omero_account:
+        description: Your OMERO instance connection credentials
+        inputs:
+            - name: username
+              label: Username
+              type: text
+              required: False
+            - name: password
+              label: Password
+              type:  password
+              required: False
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/macros.xml	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,47 @@
+<macros>
+    <!-- for historic reasons the omero-py version is used as the version for all tools -->
+    <token name="@TOOL_VERSION@">5.18.0</token>
+    <token name="@EZOMERO_VERSION@">3.0.1</token>
+    <token name="@PROFILE@">23.0</token>
+
+    <xml name="ezomero_requirements">
+        <requirements>
+            <requirement type="package" version="@EZOMERO_VERSION@">ezomero</requirement>
+            <requirement type="package" version="2.2.2">pandas</requirement>
+            <yield/>
+        </requirements>
+    </xml>
+
+    <xml name="omeropy_requirements">
+        <requirements>
+            <requirement type="package" version="@TOOL_VERSION@">omero-py</requirement>
+            <!-- openjdk is needed: https://github.com/conda-forge/omero-py-feedstock/pull/16 -->
+            <requirement type="package" version="21.0.2">openjdk</requirement>
+            <yield/>
+        </requirements>
+    </xml>
+    
+
+    <xml name="host_port">
+        <param name="omero_host" type="text" label="OMERO host URL">
+            <validator type="regex" message="Enter a valid host location, for example, your.omero.server">^[a-zA-Z0-9._-]*$</validator>
+            <validator type="expression" message="No two dots (..) allowed">'..' not in value</validator>
+        </param>
+        <param argument="omero_port" type="integer" optional="false" value="4064" label="OMERO port"/>
+        <param name="test_username" type="hidden" value=""/>
+        <param name="test_password" type="hidden" value=""/>
+    </xml>
+    <token name="@HOST_PORT@">
+        --host '$omero_host'
+        --port $omero_port
+    </token>
+
+    <xml name="credentials">
+        <configfile name="credentials"><![CDATA[
+{
+    "username": "$__user__.extra_preferences.get('omero_account|username', $test_username)",
+    "password": "$__user__.extra_preferences.get('omero_account|password', $test_password)"
+}
+        ]]></configfile>
+    </xml>
+</macros>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_filter.py	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,77 @@
+import argparse
+import csv
+import json
+import sys
+
+import ezomero as ez
+
+
def filter_ids_ezo(user, pws, host, port, filter, id, value1, value2=None, tsv_file="filter_list.tsv"):
    """Filter a list of OMERO image IDs by filename, key-value pair, or tag.

    The matching IDs are written to ``tsv_file`` (one per line) and returned.
    NOTE: ``filter`` and ``id`` shadow builtins but are kept for interface
    compatibility with existing callers.
    """
    # The IDs arrive as a comma-separated string; turn them into ints.
    image_ids = [int(token) for token in id.split(',')]

    def dump_ids(found):
        # One matching ID per row in a tab-delimited file.
        with open(tsv_file, 'w', newline='') as handle:
            tsv_writer = csv.writer(handle, delimiter='\t')
            tsv_writer.writerows([entry] for entry in found)

    with ez.connect(user, pws, "", host, port, secure=True) as conn:
        if filter == "filename":
            matches = ez.filter_by_filename(conn, image_ids, value1)
        elif filter == "KP":
            matches = ez.filter_by_kv(conn, image_ids, value1, value2)
        elif filter == "tag":
            matches = ez.filter_by_tag_value(conn, image_ids, value1)
        else:
            sys.exit(f"Unsupported object type: {filter}")
        dump_ids(matches)
        return matches
+
+
# Argument parsing
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Fetch and save data as TSV based on object type.")
    parser.add_argument("--credential-file", dest="credential_file", type=str, required=True,
                        help="Credential file (JSON file with username and password for OMERO)")
    parser.add_argument('--host', required=True,
                        help="Host server address.")
    parser.add_argument('--port', required=True, type=int,
                        help='OMERO port')
    parser.add_argument('--filter', required=True,
                        help="Filter type - Filename, Key-Value Pairs, Tag")
    parser.add_argument('--id', required=True,
                        help="List of images IDs")
    parser.add_argument('--value1', required=True,
                        help="First searching values - Filename, Key, Tag")
    # FIX: closing parenthesis was missing in the help text.
    parser.add_argument('--value2', required=False,
                        help="Second searching values - Value (necessary just for Key-Value Pairs filter)")
    parser.add_argument('--tsv_file', default='filter_list.tsv',
                        help="Output TSV file path.")
    args = parser.parse_args()

    # A KP (key-value) filter needs both the key (--value1) and the value (--value2).
    # FIX: the message previously named a nonexistent option '--value 2'.
    if args.filter == "KP" and args.value2 is None:
        raise ValueError("'--value2' is necessary to retrieve KP")

    # Credentials are read from a JSON file so they never appear on the command line.
    with open(args.credential_file, 'r') as f:
        crds = json.load(f)

    # Call the main function to get the object and save it as a TSV
    filter_ids_ezo(user=crds['username'], pws=crds['password'], host=args.host,
                   port=args.port,
                   filter=args.filter,
                   value1=args.value1,
                   value2=args.value2,
                   id=args.id,
                   tsv_file=args.tsv_file)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_filter.xml	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,115 @@
+<tool id="omero_filter" name="OMERO IDs" version="@TOOL_VERSION@+galaxy@VERSION_SUFFIX@" profile="@PROFILE@" license="MIT">
+    <description> with ezomero </description>
+    <macros>
+        <import>macros.xml</import>
+        <token name="@VERSION_SUFFIX@">0</token>
+    </macros>
+    <xrefs>
+        <xref type="bio.tools">omero</xref>
+    </xrefs>
+    <expand macro="ezomero_requirements"/>
+    <command detect_errors="exit_code"><![CDATA[
+        python '$__tool_directory__'/omero_filter.py
+        --credential-file '$credentials'
+        @HOST_PORT@
+        --filter $filter
+        --value1 '$value1'
+        --id $did
+        --tsv_file '$tsv'
+        #if $filter == "KP"
+        --value2 '$value2'
+        #end if
+
+    ]]></command>
+    <configfiles>
+        <expand macro="credentials"/>
+    </configfiles>
+    <inputs>
+        <expand macro="host_port"/>
+        <conditional name = "filter_type">
+            <param name="filter" type="select" optional="false" label="Filter type to apply:">
+                <option value="filename">Filename</option>
+                <option value="KP">Key-Value</option>
+                <option value="tag">Tag</option>
+            </param>
+            <when value="filename">
+                <param name="value1" type="text" label="Filename to search among the image IDs">
+                    <validator type="regex" message="Enter a valid filename to search in the OMERO server">^[\w\-. ]+$</validator>
+                </param>
+                <param name="value2"  value="" type="hidden" label="Not necessary filter"/>
+                <param name="did" type="text" label="List of images IDs">
+                    <validator type="regex" message="Enter a valid list of IDs (i.e. 2,45,56,67)">^\d+(,\d+)*$</validator>
+                </param>
+            </when>
+            <when value="KP">
+                <param name="value1" type="text" label="Key Value to search among the image IDs">
+                    <validator type="regex" message="Enter a valid Key to search in the OMERO server">^[\w\-. ]+$</validator>
+                </param>
+                <param name="value2" type="text" label="Pair Values to search among images IDs"/>
+                <param name="did" type="text" label="List of images IDs">
+                    <validator type="regex" message="Enter a valid list of IDs (i.e. 2,45,56,67)">^\d+(,\d+)*$</validator>
+                </param>
+            </when>
+            <when value="tag">
+                <param name="value1" type="text" label="Tag to search among the images IDs">
+                    <validator type="regex" message="Enter a valid Key to search in the OMERO server">^[\w\-. ]+$</validator>
+                </param>
+                <param name="value2"  value="" optional="true" type="hidden" label="Not necessary filter"/>
+                <param name="did" type="text" label="List of images IDs">
+                    <validator type="regex" message="Enter a valid list of IDs (i.e. 2,45,56,67)">^(\d+)(,\d+)*$</validator>
+                </param>
+            </when>
+        </conditional>
+    </inputs>
+    <outputs>
+        <data name="tsv" format="tabular"/>
+    </outputs>
+    <tests>
+        <test>
+            <param name="omero_host" value="host.docker.internal"/>
+            <param name="omero_port" value="6064"/>
+            <conditional name="filter_type">
+                <param name="filter" value="filename"/>
+                <param name="value1" value="sample_image_2.jpg"/>
+                <param name="did" value="1,2"/>
+            </conditional>
+            <param name="test_username" value="root"/>
+            <param name="test_password" value="omero"/>
+            <output name="tsv" value="output_filter_filename.tsv" ftype="tabular">
+                <assert_contents>
+                    <has_text text="2"/>
+                    <has_n_columns n="1"/>
+                </assert_contents>
+            </output>
+        </test>
+        <test>
+            <param name="omero_host" value="host.docker.internal"/>
+            <param name="omero_port" value="6064"/>
+            <conditional name="filter_type">
+                <param name="filter" value="tag"/>
+                <param name="value1" value="test_tag"/>
+                <param name="did" value="1,2"/>
+            </conditional>
+            <param name="test_username" value="root"/>
+            <param name="test_password" value="omero"/>
+            <output name="tsv" value="output_filter_tag.tsv" ftype="tabular">
+                <assert_contents>
+                    <has_text text="1"/>
+                    <has_n_columns n="1"/>
+                </assert_contents>
+            </output>
+        </test>
+    </tests>
+    <help>
+Description
+-----------
+
+Tool to filter images IDs by filename, Key-Value Pairs and Tag value.
+For Key-Value Pairs search, two values are required (Value1 = Key, Value2 = Pair).
+IDs are a list of image IDs which can be fetched using the omero_get tool.
+
+    </help>
+    <citations>
+        <citation type="doi">10.1038/nmeth.1896</citation>
+    </citations>
+</tool>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_get_id.py	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,116 @@
+import argparse
+import csv
+import json
+import sys
+
+import ezomero as ez
+
+
def get_ids_ezo(user, pws, host, port, final_obj_type, parent_obj_type, parent_id=None, tsv_file="id_list.tsv"):
    """Fetch OMERO object IDs of ``final_obj_type`` scoped by ``parent_obj_type``/``parent_id``.

    The IDs are written to ``tsv_file`` (one per line) and also returned.
    Exits with an error message for an unsupported ``final_obj_type``.
    """

    # Function to write tabular file from the ezomero output
    def write_ids_to_tsv(data):
        with open(tsv_file, 'w', newline='') as f:
            writer = csv.writer(f, delimiter='\t')
            for item in data:
                writer.writerow([item])  # Write each ID

    with ez.connect(user, pws, "", host, port, secure=True) as conn:

        if final_obj_type == "Project":
            proj_ids = ez.get_project_ids(conn)
            write_ids_to_tsv(proj_ids)
            return proj_ids

        elif final_obj_type == "Dataset":
            # A parent project is optional: project=None fetches all datasets.
            args = {'project': None}
            if parent_obj_type == "Project":
                args['project'] = parent_id
            ds_ids = ez.get_dataset_ids(conn, **args)
            write_ids_to_tsv(ds_ids)
            return ds_ids

        elif final_obj_type == "Image":
            # Images may be scoped by several container types; default to all.
            args = {
                'project': None,
                'dataset': None,
                'plate': None,
                'well': None
            }
            if parent_obj_type == "Project":
                args['project'] = parent_id
            elif parent_obj_type == "Dataset":
                args['dataset'] = parent_id
            elif parent_obj_type == "Plate":
                args['plate'] = parent_id
            elif parent_obj_type == "Well":
                args['well'] = parent_id
            elif parent_obj_type != "All":
                raise ValueError("Object set as parent_obj_type is not compatible")

            ds_ims = ez.get_image_ids(conn, **args)
            write_ids_to_tsv(ds_ims)
            return ds_ims

        elif final_obj_type == "Annotation":
            map_annot_ids = ez.get_map_annotation_ids(conn, parent_obj_type, parent_id)
            write_ids_to_tsv(map_annot_ids)
            return map_annot_ids

        elif final_obj_type == "Tag":
            tag_ids = ez.get_tag_ids(conn, parent_obj_type, parent_id)
            write_ids_to_tsv(tag_ids)
            return tag_ids

        elif final_obj_type == "Roi":
            roi_ids = ez.get_roi_ids(conn, parent_id)
            write_ids_to_tsv(roi_ids)
            return roi_ids

        elif final_obj_type == "Table":
            file_ann_ids = ez.get_file_annotation_ids(conn, parent_obj_type, parent_id)
            write_ids_to_tsv(file_ann_ids)
            return file_ann_ids

        else:
            # BUG FIX: the message previously interpolated the builtin ``filter``
            # (printing "<built-in function filter>") instead of the bad type.
            sys.exit(f"Unsupported object type: {final_obj_type}")
+
+
# Argument parsing
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Fetch OMERO object IDs as TSV from parent object.")
    parser.add_argument("--credential-file", dest="credential_file", type=str,
                        required=True, help="Credential file (JSON file with username and password for OMERO)")
    parser.add_argument('--host', required=True,
                        help="Host server address.")
    parser.add_argument('--port', required=True, type=int,
                        help='OMERO port')
    parser.add_argument('--final_obj_type', required=True,
                        help="Type of object to fetch ID: Project, Dataset, Image, Annotation, Tag, Roi, or Table.")
    parser.add_argument('--parent_obj_type', required=True,
                        help="Type of object from which you fetch IDs: Project, Dataset, Plate, Well, Image (or 'All' if you want to get all objects).")
    parser.add_argument('--parent_id', required=False, type=int,
                        help="ID of the OMERO object in `--parent_obj_type`, not required if you used `--parent_obj_type All`.")
    parser.add_argument('--tsv_file', default='id_list.tsv',
                        help="Output TSV file path.")
    args = parser.parse_args()

    # Validate the parent/final object combinations before connecting.
    # FIX: corrected typos in the three error messages below
    # ("is you use" -> "if you use", "retrived" -> "retrieved",
    #  "is compatible" -> "are compatible").
    if args.parent_id is None and args.parent_obj_type != "All":
        raise ValueError("ID is only optional if you use `--parent_obj_type All`")

    if args.final_obj_type == "Roi" and args.parent_obj_type != "Image":
        raise ValueError("Roi IDs can only be retrieved from images, use `--parent_obj_type Image`")

    if args.parent_obj_type == "All" and args.final_obj_type not in ["Image", "Dataset", "Project"]:
        raise ValueError("Only Images, Datasets and Projects are compatible with `--parent_obj_type All`")

    with open(args.credential_file, 'r') as f:
        crds = json.load(f)

    # Call the main function to get the object and save it as a TSV
    get_ids_ezo(user=crds['username'], pws=crds['password'], host=args.host,
                port=args.port,
                final_obj_type=args.final_obj_type,
                parent_obj_type=args.parent_obj_type,
                parent_id=args.parent_id,
                tsv_file=args.tsv_file)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_get_value.py	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,98 @@
+import argparse
+import csv
+import json
+import sys
+
+import ezomero as ez
+
+
def get_object_ezo(user, pws, host, port, obj_type, ids, tsv_file):
    """Fetch OMERO Annotation, Tag, or Table values and save them to ``tsv_file``."""
    # Function to write tabular file from the ezomero output
    def write_values_to_tsv(data, header):
        with open(tsv_file, 'w', newline='') as f:
            writer = csv.writer(f, delimiter='\t')
            writer.writerow([header])  # Write the header
            for item in data:
                writer.writerow([item])  # Write each value

    # Function to write tabular file from a dictionary ezomero output
    def write_dict_to_tsv(data, headers):
        with open(tsv_file, 'w', newline='') as f:
            writer = csv.writer(f, delimiter='\t')
            writer.writerow(headers)  # Write the headers
            for key, value in data.items():
                writer.writerow([key, value])  # Write each key-value pair

    # Function to write tabular file from list of list ezomero output
    def write_table_to_tsv(data):
        with open(tsv_file, 'w') as f:
            for row in data:
                f.write('\t'.join([str(val) for val in row]) + '\n')

    with ez.connect(user, pws, "", host, port, secure=True) as conn:
        if obj_type == "Annotation":
            # Merge all requested map annotations into one dictionary.
            ma_dict = {}
            for maid in ids:
                current_ma_dict = ez.get_map_annotation(conn, maid)
                ma_dict = {**ma_dict, **current_ma_dict}
            write_dict_to_tsv(ma_dict, ["Annotation ID", "Annotation Value"])
            return ma_dict
        elif obj_type == "Tag":
            tags = []
            for tag_id in ids:
                tags.append(ez.get_tag(conn, tag_id))
            # Sort the tags for consistency.
            # BUG FIX: was ``tags.sort`` (missing parentheses), which never sorted.
            tags.sort()
            write_values_to_tsv(tags, "Tags")
            return tags
        elif obj_type == "Table":
            if len(ids) > 1:
                raise ValueError("Only one table can be exported at a time")
            table = ez.get_table(conn, ids[0])
            write_table_to_tsv(table)
            return table

        else:
            # BUG FIX: the message previously interpolated the builtin ``filter``.
            sys.exit(f"Unsupported object type: {obj_type}")
+
+
# Argument parsing
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Fetch and save data as TSV based on object type.")
    parser.add_argument("--credential-file", dest="credential_file", type=str,
                        required=True, help="Credential file (JSON file with username and password for OMERO)")
    parser.add_argument('--host', required=True,
                        help="Host server address.")
    parser.add_argument('--port', required=True, type=int,
                        help='OMERO port')
    parser.add_argument('--obj_type', required=True,
                        help="Type of object to fetch: Annotation, Table or Tag.")
    # IDs come either directly on the command line or from a file, never both.
    # FIX: the group is now required; previously omitting both options
    # crashed later with ids=None.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--ids', nargs='+', type=int,
                       help="IDs of the OMERO objects.")
    group.add_argument('--ids_path',
                       help="File with IDs of the OMERO objects (one per line).")
    # FIX: dropped the contradictory ``required=True`` so the default is usable.
    parser.add_argument('--tsv_file', default='id_list.tsv',
                        help="Output TSV file path.")
    args = parser.parse_args()

    if args.ids_path:
        # Collect integer IDs, skipping (but reporting) malformed lines.
        args.ids = []
        with open(args.ids_path, 'r') as f:
            for line in f:
                try:
                    args.ids.append(int(line))
                except ValueError:
                    print(f"{line.strip()} is not a valid ID.")
        if len(args.ids) == 0:
            # FIX: typo "Cound" -> "Could".
            raise ValueError("Could not find a single ID in the file.")

    with open(args.credential_file, 'r') as f:
        crds = json.load(f)

    # Call the main function to get the object and save it as a TSV
    get_object_ezo(user=crds['username'], pws=crds['password'], host=args.host,
                   port=args.port,
                   obj_type=args.obj_type,
                   ids=args.ids,
                   tsv_file=args.tsv_file)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_metadata_upload.py	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,95 @@
+import argparse
+import json
+from datetime import datetime
+
+import ezomero as ez
+import pandas as pd
+
+
def metadata_import_ezo(user, pws, host, port, obj_type, did=None, ann_type="table", ann_file=None, an_name=None,
                        log_file='metadata_import_log.txt'):
    """Upload tabular or key-value metadata from a TSV file to an OMERO object.

    If ``did`` is None a new project/screen/dataset named with the current
    timestamp is created first. The single-line outcome is written to
    ``log_file`` (SUCCESS or ERROR).
    """
    def upload_metadata(conn, obj_type, did, data_dict, df, ann_type, an_name):
        # Post the annotation and read it back as confirmation; None on failure.
        try:
            if ann_type == "KV":
                id_map_ann = ez.post_map_annotation(conn, obj_type, object_id=int(did), kv_dict=data_dict, ns=an_name)
                ma_dict = ez.get_map_annotation(conn, id_map_ann)
                return ma_dict
            elif ann_type == "table":
                id_tb_ann = ez.post_table(conn, df, object_type=obj_type, object_id=int(did), title=an_name,
                                          headers=True)
                tb_dict = ez.get_table(conn, id_tb_ann)
                return tb_dict
        except Exception as e:
            log_error(f"Error uploading metadata for {obj_type} with ID {did}: {str(e)}")
            return None

    # The log always holds exactly one line describing the final outcome
    # (mode 'w' deliberately overwrites any earlier message).
    def log_error(message):
        with open(log_file, 'w') as f:
            f.write(f"ERROR: {message}\n")

    def log_success(message):
        with open(log_file, 'w') as f:
            f.write(f"SUCCESS: {message}\n")

    try:
        df = pd.read_csv(ann_file, delimiter='\t')
    except FileNotFoundError as e:
        log_error(f"Annotation file not found: {str(e)}")
        return

    if ann_type == "table":
        data_dict = df.to_dict(orient='records')
    elif ann_type == "KV":
        # Key-value pairs are taken from the first data row only.
        data_dict = {col: df[col].iloc[0] for col in df.columns}
    else:
        # FIX: previously an unknown ann_type left ``data_dict`` unbound and
        # surfaced later as a confusing NameError; fail early instead.
        log_error(f"Unsupported annotation type provided: {ann_type}")
        return

    try:
        with ez.connect(user, pws, "", host, port, secure=True) as conn:
            if obj_type == "project":
                if did is None:
                    did = ez.post_project(conn, project_name=str(datetime.now()))
                result = upload_metadata(conn, "Project", did, data_dict, df, ann_type, an_name)
            elif obj_type == "screen":
                if did is None:
                    did = ez.post_screen(conn, screen_name=str(datetime.now()))
                result = upload_metadata(conn, "Screen", did, data_dict, df, ann_type, an_name)
            elif obj_type == "dataset":
                if did is None:
                    did = ez.post_dataset(conn, dataset_name=str(datetime.now()))
                result = upload_metadata(conn, "Dataset", did, data_dict, df, ann_type, an_name)
            elif obj_type == "image":
                result = upload_metadata(conn, "Image", did, data_dict, df, ann_type, an_name)
            else:
                raise ValueError("Unsupported object type provided: {}".format(obj_type))

            if result is not None:
                log_success(f"Successfully uploaded metadata for {obj_type} with ID {did}. Result: {result}")
            else:
                log_error(f"Failed to upload metadata for {obj_type} with ID {did}.")
        # FIX: removed a redundant ``conn.close()`` here; the ``with`` block
        # already closes the connection on exit.

    except Exception as e:
        log_error(f"Connection error: {str(e)}")
+
+
if __name__ == "__main__":
    # Command-line interface for metadata_import_ezo.
    cli = argparse.ArgumentParser(description='Import metadata into OMERO.')
    cli.add_argument("--credential-file", dest="credential_file", type=str, required=True, help="Credential file (JSON file with username and password for OMERO)")
    cli.add_argument('--host', required=True, help='OMERO host')
    cli.add_argument('--port', required=True, type=int, help='OMERO port')
    cli.add_argument('--obj_type', required=True, choices=['project', 'screen', 'dataset', 'image'],
                     help='Type of OMERO object')
    cli.add_argument('--did', type=int, help='ID of the object (if it exists)')
    cli.add_argument('--ann_type', required=True, choices=['table', 'KV'], help='Annotation type')
    cli.add_argument('--ann_file', required=True, help='Path to the annotation file')
    cli.add_argument('--an_name', required=True, help='Namespace or title for the annotation')
    cli.add_argument('--log_file', default='metadata_import_log.txt', help='Path to the log file')
    options = cli.parse_args()

    # Credentials live in a JSON file so they are not exposed on the command line.
    with open(options.credential_file, 'r') as handle:
        credentials = json.load(handle)

    metadata_import_ezo(user=credentials['username'], pws=credentials['password'], host=options.host,
                        port=options.port, obj_type=options.obj_type, did=options.did,
                        ann_type=options.ann_type, ann_file=options.ann_file,
                        an_name=options.an_name, log_file=options.log_file)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/omero_roi_upload.py	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,176 @@
+import argparse
+import json
+import re
+
+import numpy as np
+import pandas as pd
+from ezomero import connect, post_roi
+from ezomero.rois import Ellipse, Label, Line, Point, Polygon, Polyline, Rectangle
+
+
def parse_color(color_str):
    """Parse a color string such as "(255,0,0,128)" into a tuple of ints.

    Returns None for an empty or falsy input. All digit runs in the string
    are taken as components, so surrounding punctuation is irrelevant.
    """
    if not color_str:
        return None
    components = re.findall(r'\d+', color_str)
    return tuple(int(component) for component in components)
+
+
def parse_points(points_str):
    """Parse "(x1,y1),(x2,y2),..." into a list of float coordinate tuples.

    Returns None for an empty or falsy input. An optional surrounding
    [ ... ] bracket pair is tolerated and stripped.
    """
    if not points_str:
        return None
    trimmed = points_str.strip("[]")
    # Split between points, then drop any parentheses left on each piece.
    raw_points = [piece.strip("()") for piece in trimmed.split("),(")]
    return [tuple(float(coord) for coord in piece.split(',')) for piece in raw_points]
+
+
def create_shape(row):
    """Translate one tabular row into the matching ezomero shape object.

    The 'shape' column selects the class. Required geometry columns raise
    KeyError if absent; optional attributes fall back to None via .get().
    Returns None for an unrecognized shape type.
    """
    def common_kwargs():
        # Attributes shared by every shape type.
        return {
            'z': row.get('z'),
            'c': row.get('c'),
            't': row.get('t'),
            'fill_color': parse_color(row.get('fill_color')),
            'stroke_color': parse_color(row.get('stroke_color')),
            'stroke_width': row.get('stroke_width'),
        }

    kind = row['shape']

    if kind == 'Ellipse':
        return Ellipse(x=row['x'], y=row['y'], x_rad=row['x_rad'], y_rad=row['y_rad'],
                       label=row.get('label'), **common_kwargs())
    if kind == 'Label':
        # Label is the only shape whose 'label' column is mandatory.
        return Label(x=row['x'], y=row['y'], label=row['label'], fontSize=row['fontSize'],
                     **common_kwargs())
    if kind == 'Line':
        return Line(x1=row['x1'], y1=row['y1'], x2=row['x2'], y2=row['y2'],
                    markerStart=row.get('markerStart', None), markerEnd=row.get('markerEnd', None),
                    label=row.get('label'), **common_kwargs())
    if kind == 'Point':
        return Point(x=row['x'], y=row['y'], label=row.get('label'), **common_kwargs())
    if kind == 'Polygon':
        return Polygon(points=parse_points(row['points']), label=row.get('label'), **common_kwargs())
    if kind == 'Polyline':
        return Polyline(points=parse_points(row['points']), label=row.get('label'), **common_kwargs())
    if kind == 'Rectangle':
        return Rectangle(x=row['x'], y=row['y'], width=row['width'], height=row['height'],
                         label=row.get('label'), **common_kwargs())
    return None
+
+
def main(input_file, conn, image_id, log_file):
    """Read shapes from a tabular file and post each one as a single-shape ROI.

    Every progress message is printed to stdout and mirrored to ``log_file``.
    """
    with open(log_file, 'w') as log:
        def report(message):
            print(message)
            log.write(message + "\n")

        table = pd.read_csv(input_file, sep='\t')
        # NaN -> None so optional columns behave with .get() in create_shape.
        table = table.replace({np.nan: None})
        total = len(table)
        for index, row in table.iterrows():
            report(f"Processing row {index + 1}/{total}: {row.to_dict()}")
            shape = create_shape(row)
            if not shape:
                report(f"Skipping row {index + 1}: Unable to create shape")
                continue
            name = row['roi_name'] if 'roi_name' in row else None
            description = row['roi_description'] if 'roi_description' in row else None
            roi_id = post_roi(conn, image_id, [shape], name=name, description=description)
            report(f"ROI ID: {roi_id} for row {index + 1}")
+
+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Create shapes from a tabular file and optionally post them as an ROI to OMERO.")
    # FIX: the input file was silently optional, which only surfaced later as a
    # pandas crash on a None path; make it required for a clean argparse error.
    parser.add_argument("--input_file", required=True, help="Path to the input tabular file.")
    parser.add_argument("--image_id", type=int, required=True, help="ID of the image to which the ROI will be linked")
    parser.add_argument("--host", type=str, required=True, help="OMERO server host")
    parser.add_argument("--credential-file", dest="credential_file", type=str, required=True, help="Credential file (JSON file with username and password for OMERO)")
    parser.add_argument("--port", type=int, default=4064, help="OMERO server port")
    parser.add_argument("--log_file", type=str, default="process.txt", help="Log file path")

    args = parser.parse_args()

    # Credentials are read from a JSON file so they never appear on the command line.
    with open(args.credential_file, 'r') as f:
        crds = json.load(f)

    conn = connect(
        host=args.host,
        user=crds['username'],
        password=crds['password'],
        port=args.port,
        group="",
        secure=True
    )

    # Ensure the connection is closed even if ROI upload fails part-way.
    try:
        main(args.input_file, conn, args.image_id, args.log_file)
    finally:
        conn.close()
Binary file test-data/input1.tif has changed
Binary file test-data/input2.tif has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/input_roi.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,8 @@
+shape	x	y	x_rad	y_rad	label	fontSize	x1	y1	x2	y2	points	width	height	fill_color	stroke_color	stroke_width	z	c	t	roi_name	roi_description
+Ellipse	50.0	50.0	20.0	10.0										(255,0,0,128)	(0,0,0,255)	2	0	0	0	Example ROI	This is an example ROI
+Label	100.0	100.0			Test Label	12.0								(255,255,255,0)	(0,0,255,255)	1	0	0	0	Example ROI	This is an example ROI
+Line							200.0	200.0	250.0	250.0				(0,255,0,128)	(0,0,0,255)	2	0	1	0	Example ROI	This is an example ROI
+Point	150.0	150.0												(0,0,255,128)	(255,0,0,255)	3	0	2	0	Example ROI	This is an example ROI
+Polygon											(300,300),(350,350),(300,400)			(255,255,0,128)	(0,0,0,255)	2	1	0	0	Example ROI	This is an example ROI
+Polyline											(400,400),(450,450),(400,500)			(0,255,255,128)	(0,0,0,255)	3	0	0	0	Example ROI	This is an example ROI
+Rectangle	500.0	500.0										100.0	50.0	(255,0,255,128)	(0,0,0,255)	2	0	0	0	Example ROI	This is an example ROI
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/input_roi_minimal.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,2 @@
+shape	points	label
+Polygon	(300,300),(350,350),(300,400)	Example ROI
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/metadata.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,2 @@
+Key1	Key2
+Value1	Value2
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/omero_output.txt	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,2 @@
+Image:3
+Image:4
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_KV_import.txt	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,1 @@
+SUCCESS: Successfully uploaded metadata for dataset with ID 3. Result: {'Key1': 'Value1', 'Key2': 'Value2'}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_filter_filename.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,1 @@
+2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_filter_tag.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,1 @@
+1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_ids_dataset.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,1 @@
+1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_ids_image.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,2 @@
+1
+2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_ids_project.tsv	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,1 @@
+1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_table_import.txt	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,2 @@
+SUCCESS: Successfully uploaded metadata for project with ID 2. Result:      Key1    Key2
+0  Value1  Value2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_table_roi.txt	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,14 @@
+Processing row 1/7: {'shape': 'Ellipse', 'x': 50.0, 'y': 50.0, 'x_rad': 20.0, 'y_rad': 10.0, 'label': None, 'fontSize': None, 'x1': None, 'y1': None, 'x2': None, 'y2': None, 'points': None, 'width': None, 'height': None, 'fill_color': '(255,0,0,128)', 'stroke_color': '(0,0,0,255)', 'stroke_width': 2, 'z': 0, 'c': 0, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 1 for row 1
+Processing row 2/7: {'shape': 'Label', 'x': 100.0, 'y': 100.0, 'x_rad': None, 'y_rad': None, 'label': 'Test Label', 'fontSize': 12.0, 'x1': None, 'y1': None, 'x2': None, 'y2': None, 'points': None, 'width': None, 'height': None, 'fill_color': '(255,255,255,0)', 'stroke_color': '(0,0,255,255)', 'stroke_width': 1, 'z': 0, 'c': 0, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 2 for row 2
+Processing row 3/7: {'shape': 'Line', 'x': None, 'y': None, 'x_rad': None, 'y_rad': None, 'label': None, 'fontSize': None, 'x1': 200.0, 'y1': 200.0, 'x2': 250.0, 'y2': 250.0, 'points': None, 'width': None, 'height': None, 'fill_color': '(0,255,0,128)', 'stroke_color': '(0,0,0,255)', 'stroke_width': 2, 'z': 0, 'c': 1, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 3 for row 3
+Processing row 4/7: {'shape': 'Point', 'x': 150.0, 'y': 150.0, 'x_rad': None, 'y_rad': None, 'label': None, 'fontSize': None, 'x1': None, 'y1': None, 'x2': None, 'y2': None, 'points': None, 'width': None, 'height': None, 'fill_color': '(0,0,255,128)', 'stroke_color': '(255,0,0,255)', 'stroke_width': 3, 'z': 0, 'c': 2, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 4 for row 4
+Processing row 5/7: {'shape': 'Polygon', 'x': None, 'y': None, 'x_rad': None, 'y_rad': None, 'label': None, 'fontSize': None, 'x1': None, 'y1': None, 'x2': None, 'y2': None, 'points': '(300,300),(350,350),(300,400)', 'width': None, 'height': None, 'fill_color': '(255,255,0,128)', 'stroke_color': '(0,0,0,255)', 'stroke_width': 2, 'z': 1, 'c': 0, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 5 for row 5
+Processing row 6/7: {'shape': 'Polyline', 'x': None, 'y': None, 'x_rad': None, 'y_rad': None, 'label': None, 'fontSize': None, 'x1': None, 'y1': None, 'x2': None, 'y2': None, 'points': '(400,400),(450,450),(400,500)', 'width': None, 'height': None, 'fill_color': '(0,255,255,128)', 'stroke_color': '(0,0,0,255)', 'stroke_width': 3, 'z': 0, 'c': 0, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 6 for row 6
+Processing row 7/7: {'shape': 'Rectangle', 'x': 500.0, 'y': 500.0, 'x_rad': None, 'y_rad': None, 'label': None, 'fontSize': None, 'x1': None, 'y1': None, 'x2': None, 'y2': None, 'points': None, 'width': 100.0, 'height': 50.0, 'fill_color': '(255,0,255,128)', 'stroke_color': '(0,0,0,255)', 'stroke_width': 2, 'z': 0, 'c': 0, 't': 0, 'roi_name': 'Example ROI', 'roi_description': 'This is an example ROI'}
+ROI ID: 7 for row 7
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_table_roi_minimal.txt	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,2 @@
+Processing row 1/1: {'shape': 'Polygon', 'points': '(300,300),(350,350),(300,400)', 'label': 'Example ROI'}
+ROI ID: 8 for row 1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output_target_import.txt	Tue Oct 22 11:54:08 2024 +0000
@@ -0,0 +1,1 @@
+SUCCESS: Successfully uploaded metadata for dataset with ID 1. Result: {'Key1': 'Value1', 'Key2': 'Value2'}