# HG changeset patch
# User imgteam
# Date 1645372018 0
# Node ID 4e089a0983b13b56270377b3fd7aad3ad14c17f8
# Parent b0503eec7bd6950020dc1c5d47091d298ba9fc59
"planemo upload for repository https://github.com/BMCV/galaxy-image-analysis/tools/landmark_registration/ commit 927b78d47c31714776ccdf3d16f26c3779298abb"

diff -r b0503eec7bd6 -r 4e089a0983b1 landmark_registration.py
--- a/landmark_registration.py	Fri Feb 22 19:04:47 2019 -0500
+++ b/landmark_registration.py	Sun Feb 20 15:46:58 2022 +0000
@@ -1,27 +1,65 @@
-from skimage.measure import ransac
-from skimage.transform import AffineTransform
-import pandas as pd
-import numpy as np
+"""
+Copyright 2017-2022 Biomedical Computer Vision Group, Heidelberg University.
+
+Distributed under the MIT license.
+See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
+
+"""
+
 import argparse
 
-def landmark_registration(points_file1, points_file2, out_file, residual_threshold=2, max_trials=100, delimiter="\t"):
-    points1 = pd.read_csv(points_file1, delimiter=delimiter)
-    points2 = pd.read_csv(points_file2, delimiter=delimiter)
+import numpy as np
+import pandas as pd
+from scipy.linalg import lstsq
+from skimage.measure import ransac
+from skimage.transform import AffineTransform
+
+
+def landmark_registration(pts_f1, pts_f2, out_f, res_th=None, max_ite=None, delimiter="\t"):
+
+    points1 = pd.read_csv(pts_f1, delimiter=delimiter)
+    points2 = pd.read_csv(pts_f2, delimiter=delimiter)
+
+    src = np.concatenate([np.array(points1['x']).reshape([-1, 1]),
+                          np.array(points1['y']).reshape([-1, 1])],
+                         axis=-1)
+    dst = np.concatenate([np.array(points2['x']).reshape([-1, 1]),
+                          np.array(points2['y']).reshape([-1, 1])],
+                         axis=-1)
 
-    src = np.concatenate([np.array(points1['x']).reshape([-1,1]), np.array(points1['y']).reshape([-1,1])], axis=-1)
-    dst = np.concatenate([np.array(points2['x']).reshape([-1,1]), np.array(points2['y']).reshape([-1,1])], axis=-1)
+    if res_th is not None and max_ite is not None:
+        model_robust, inliers = ransac((src, dst), AffineTransform, min_samples=3, residual_threshold=res_th, max_trials=max_ite)
+        pd.DataFrame(model_robust.params).to_csv(out_f, header=None, index=False, sep="\t")
 
-    model = AffineTransform()
-    model_robust, inliers = ransac((src, dst), AffineTransform, min_samples=3,
-                                   residual_threshold=residual_threshold, max_trials=max_trials)
-    pd.DataFrame(model_robust.params).to_csv(out_file, header=None, index=False, sep="\t")
+    else:
+        A = np.zeros((src.size, 6))
+        A[0:src.shape[0], [0, 1]] = src
+        A[0:src.shape[0], 2] = 1
+        A[src.shape[0]:, [3, 4]] = src
+        A[src.shape[0]:, 5] = 1
+        b = dst.T.flatten().astype('float64')
+        x = lstsq(A, b)
+
+        tmat = np.eye(3)
+        tmat[0, :] = x[0].take([0, 1, 2])
+        tmat[1, :] = x[0].take([3, 4, 5])
+        pd.DataFrame(tmat).to_csv(out_f, header=None, index=False, sep="\t")
+
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="Estimate transformation from points")
-    parser.add_argument("points_file1", help="Paste path to src points")
-    parser.add_argument("points_file2", help="Paste path to dst points")
-    parser.add_argument("warp_matrix", help="Paste path to warp_matrix.csv that should be used for transformation")
-    parser.add_argument("--residual_threshold", dest="residual_threshold", help="Maximum distance for a data point to be classified as an inlier.", type=float, default=2)
-    parser.add_argument("--max_trials", dest="max_trials", help="Maximum number of iterations for random sample selection.", type=int, default=100)
+    parser = argparse.ArgumentParser(description="Estimate affine transformation matrix based on landmark coordinates")
+    parser.add_argument("fn_pts1", help="Coordinates of SRC landmarks (tsv file)")
+    parser.add_argument("fn_pts2", help="Coordinates of DST landmarks (tsv file)")
+    parser.add_argument("fn_tmat", help="Path the output (transformation matrix)")
+    parser.add_argument("--res_th", dest="res_th", type=float, help="Maximum distance for a data point to be classified as an inlier")
+    parser.add_argument("--max_ite", dest="max_ite", type=int, help="Maximum number of iterations for random sample selection")
     args = parser.parse_args()
-    landmark_registration(args.points_file1, args.points_file2, args.warp_matrix, residual_threshold=args.residual_threshold, max_trials=args.max_trials)
+
+    res_th = None
+    if args.res_th:
+        res_th = args.res_th
+    max_ite = None
+    if args.max_ite:
+        max_ite = args.max_ite
+
+    landmark_registration(args.fn_pts1, args.fn_pts2, args.fn_tmat, res_th=res_th, max_ite=max_ite)
diff -r b0503eec7bd6 -r 4e089a0983b1 landmark_registration.xml
--- a/landmark_registration.xml	Fri Feb 22 19:04:47 2019 -0500
+++ b/landmark_registration.xml	Sun Feb 20 15:46:58 2022 +0000
@@ -1,40 +1,62 @@
[The XML element markup of this hunk was stripped in this copy of the patch; only the text content of the changed and context lines is recoverable:]
-    Landmark Registration
+    estimates the affine transformation matrix
-    scikit-image
-    pandas
-    numpy
+    scikit-image
+    scipy
+    pandas
+    numpy
 **What it does**
 This tool estimates the transformation matrix between two sets of 2d points.
+
+About the format of landmark coordinates in the input TSV table: Columns with header "x" and "y" are for x- and y-coordinate, respectively.
 10.1016/j.jbiotec.2017.07.019
diff -r b0503eec7bd6 -r 4e089a0983b1 test-data/points_fixed.tsv
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/points_fixed.tsv	Sun Feb 20 15:46:58 2022 +0000
@@ -0,0 +1,6 @@
+	x	y
+0	33	107
+1	169	74
+2	178	207
+3	230	136
+4	114	131
\ No newline at end of file
diff -r b0503eec7bd6 -r 4e089a0983b1 test-data/points_moving.tsv
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/points_moving.tsv	Sun Feb 20 15:46:58 2022 +0000
@@ -0,0 +1,6 @@
+	x	y
+0	43	80
+1	184	88
+2	152	219
+3	224	166
+4	114	128
diff -r b0503eec7bd6 -r 4e089a0983b1 test-data/tmat.tsv
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/tmat.tsv	Sun Feb 20 15:46:58 2022 +0000
@@ -0,0 +1,3 @@
+0.9473192798662091	0.2999824649666423	-31.90319646127108
+-0.28701210871295463	0.9448018442119746	43.47552520776674
+0.0	0.0	1.0
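For reference, and not part of the changeset itself: the short Python sketch below replays the new least-squares branch of landmark_registration.py on the test data added above and compares the result with test-data/tmat.tsv. The file locations, and the assumption that points_moving.tsv plays the role of the SRC landmarks while points_fixed.tsv provides the DST landmarks, are inferred from the numbers rather than stated explicitly in the patch.

# Sketch only: replicates the least-squares branch of landmark_registration.py.
# Assumes the three test-data files from this changeset are available locally;
# which point set is SRC in the actual tool test is an inference, not a given.
import numpy as np
import pandas as pd
from scipy.linalg import lstsq

src = pd.read_csv("test-data/points_moving.tsv", delimiter="\t")[["x", "y"]].to_numpy(dtype="float64")
dst = pd.read_csv("test-data/points_fixed.tsv", delimiter="\t")[["x", "y"]].to_numpy(dtype="float64")

# Solve dst_x = a*x + b*y + c and dst_y = d*x + e*y + f for all landmarks at once.
# The first len(src) rows of A hold the x-equations, the remaining rows the
# y-equations, matching b = dst.T.flatten() = [dst_x_1..dst_x_n, dst_y_1..dst_y_n].
A = np.zeros((src.size, 6))
A[:len(src), [0, 1]] = src
A[:len(src), 2] = 1
A[len(src):, [3, 4]] = src
A[len(src):, 5] = 1
b = dst.T.flatten()

coeffs = lstsq(A, b)[0]  # least-squares solution [a, b, c, d, e, f]
tmat = np.eye(3)
tmat[0, :] = coeffs[:3]
tmat[1, :] = coeffs[3:]

expected = pd.read_csv("test-data/tmat.tsv", delimiter="\t", header=None).to_numpy()
print(tmat)
print("max |difference| to tmat.tsv:", np.abs(tmat - expected).max())

The equivalent command-line call would be roughly: python landmark_registration.py test-data/points_moving.tsv test-data/points_fixed.tsv out_tmat.tsv; leaving out --res_th and --max_ite selects the least-squares branch, while passing both switches selects the RANSAC estimator instead.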