Repository 'xarray_metadata_info'
hg clone https://toolshed.g2.bx.psu.edu/repos/ecology/xarray_metadata_info

Changeset 5:00de53d18b99 (2022-07-31)
Previous changeset 4:9bbaab36a5d4 (2022-01-20)
Commit message:
planemo upload for repository https://github.com/galaxyecology/tools-ecology/tree/master/tools/data_manipulation/xarray/ commit fd8ad4d97db7b1fd3876ff63e14280474e06fdf7
modified:
macros.xml
test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0.png
test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0_title.png
test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time1.png
test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time50.png
test-data/version.tabular
xarray_metadata_info.xml
added:
macros_timeseries.xml
test-data/time_series.png
test-data/time_series.tabular
test-data/time_series_customized.png
test-data/time_series_customized.tabular
timeseries.py
xarray_info.py
xarray_select.py
removed:
xarray_tool.py
diff -r 9bbaab36a5d4 -r 00de53d18b99 macros.xml
--- a/macros.xml Thu Jan 20 17:09:40 2022 +0000
+++ b/macros.xml Sun Jul 31 21:22:03 2022 +0000
@@ -1,5 +1,5 @@
 <macros>
-    <token name="@TOOL_VERSION@">0.20.2</token>
+    <token name="@TOOL_VERSION@">2022.3.0</token>
     <token name="@VERSION_SUFFIX@">0</token>
     <token name="@PROFILE@">20.05</token>
     <xml name="edam_ontology">
diff -r 9bbaab36a5d4 -r 00de53d18b99 macros_timeseries.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/macros_timeseries.xml Sun Jul 31 21:22:03 2022 +0000
@@ -0,0 +1,47 @@
+<macros>
+    <xml name="config_series">
+        <configfiles>
+            <configfile name="series_customization"><![CDATA[
+{
+#if $condi_datetime.datetime=="yes"
+#if str($condi_datetime.time_name).strip()
+"time_name":'$condi_datetime.time_name',
+#end if
+#if str($condi_datetime.time_start_value).strip()
+"time_start_value":"$condi_datetime.time_start_value",
+#end if
+#if str($condi_datetime.time_end_value).strip()
+"time_end_value":"$condi_datetime.time_end_value",
+#end if
+#end if
+#if str($lon_value).strip()
+"lon_value":'$lon_value',
+#end if
+#if str($lat_value).strip()
+"lat_value":'$lat_value',
+#end if
+#if $lon_name
+"lon_name":'$lon_name',
+#end if
+#if $lat_name
+"lat_name":'$lat_name',
+#end if
+#if str($adv.format_date).strip()
+"format_date":'$adv.format_date',
+#end if
+#if str($adv.plot_title).strip()
+"title":'$adv.plot_title',
+#end if
+#if str($adv.xlabel).strip()
+"xlabel":'$adv.xlabel',
+#end if
+#if str($adv.ylabel).strip()
+"ylabel":'$adv.ylabel',
+#end if
+}
+            ]]>
+            </configfile>
+        </configfiles>
+   
+    </xml>
+</macros>
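
Note on the configfile above: the Cheetah template renders to a small Python dict literal, which timeseries.py later reads back with ast.literal_eval. A minimal sketch of a rendered config and of the parsing step (the parameter values are made up for illustration; the parsing mirrors TimeSeries.__init__ further below):

import ast

# Hypothetical rendered content of the series_customization configfile
config_text = """
{
"time_name":'time',
"time_start_value":"2011-01-15",
"time_end_value":"2013-12-15",
"lon_value":'-6.0',
"lat_value":'44.75',
"title":'Chlorophyll time series',
}
"""

# Same parsing trick as TimeSeries.__init__: keep what sits between the braces,
# then evaluate it as a dict literal (the trailing comma is legal there)
sdict = config_text.replace("\n", "").split('{')[1].split('}')[0]
params = ast.literal_eval('{' + sdict.strip() + '}')
print(params['time_start_value'])   # -> 2011-01-15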
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0.png has changed
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0_title.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0_title.png has changed
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time1.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time1.png has changed
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time50.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time50.png has changed
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/time_series.png
Binary file test-data/time_series.png has changed
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/time_series.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/time_series.tabular Sun Jul 31 21:22:03 2022 +0000
@@ -0,0 +1,38 @@
+time longitude latitude depth chl
+2010-12-15 00:00:00 -6.0000005 44.75 0.50576 0.31
+2011-01-15 00:00:00 -6.0000005 44.75 0.50576 0.37
+2011-02-15 00:00:00 -6.0000005 44.75 0.50576 0.81
+2011-03-15 00:00:00 -6.0000005 44.75 0.50576 1.41
+2011-04-15 00:00:00 -6.0000005 44.75 0.50576 1.8399999
+2011-05-15 00:00:00 -6.0000005 44.75 0.50576 0.099999994
+2011-06-15 00:00:00 -6.0000005 44.75 0.50576 0.03
+2011-07-15 00:00:00 -6.0000005 44.75 0.50576 0.03
+2011-08-15 00:00:00 -6.0000005 44.75 0.50576 0.01
+2011-09-15 00:00:00 -6.0000005 44.75 0.50576 0.01
+2011-10-15 00:00:00 -6.0000005 44.75 0.50576 0.02
+2011-11-15 00:00:00 -6.0000005 44.75 0.50576 0.07
+2011-12-15 00:00:00 -6.0000005 44.75 0.50576 0.34
+2012-01-15 00:00:00 -6.0000005 44.75 0.50576 0.35
+2012-02-15 00:00:00 -6.0000005 44.75 0.50576 0.37
+2012-03-15 00:00:00 -6.0000005 44.75 0.50576 1.5799999
+2012-04-15 00:00:00 -6.0000005 44.75 0.50576 1.12
+2012-05-15 00:00:00 -6.0000005 44.75 0.50576 1.16
+2012-06-15 00:00:00 -6.0000005 44.75 0.50576 0.07
+2012-07-15 00:00:00 -6.0000005 44.75 0.50576 0.01
+2012-08-15 00:00:00 -6.0000005 44.75 0.50576 0.02
+2012-09-15 00:00:00 -6.0000005 44.75 0.50576 0.03
+2012-10-15 00:00:00 -6.0000005 44.75 0.50576 0.22
+2012-11-15 00:00:00 -6.0000005 44.75 0.50576 0.34
+2012-12-15 00:00:00 -6.0000005 44.75 0.50576 0.29
+2013-01-15 00:00:00 -6.0000005 44.75 0.50576 0.37
+2013-02-15 00:00:00 -6.0000005 44.75 0.50576 0.38
+2013-03-15 00:00:00 -6.0000005 44.75 0.50576 1.15
+2013-04-15 00:00:00 -6.0000005 44.75 0.50576 1.9
+2013-05-15 00:00:00 -6.0000005 44.75 0.50576 0.5
+2013-06-15 00:00:00 -6.0000005 44.75 0.50576 0.12
+2013-07-15 00:00:00 -6.0000005 44.75 0.50576 0.01
+2013-08-15 00:00:00 -6.0000005 44.75 0.50576 0.0
+2013-09-15 00:00:00 -6.0000005 44.75 0.50576 0.01
+2013-10-15 00:00:00 -6.0000005 44.75 0.50576 0.01
+2013-11-15 00:00:00 -6.0000005 44.75 0.50576 0.12
+2013-12-15 00:00:00 -6.0000005 44.75 0.50576 0.34
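
The tabular written by the time series tool is plain tab-separated text with a header row, so the test data above can be inspected directly; a small sketch (path as in this repository, assuming it is run from the repository root):

import pandas as pd

# Load the expected test output; 'time' becomes a proper datetime column
ts = pd.read_csv('test-data/time_series.tabular', sep='\t', parse_dates=['time'])
print(ts[['time', 'chl']].head())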
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/time_series_customized.png
Binary file test-data/time_series_customized.png has changed
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/time_series_customized.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/time_series_customized.tabular Sun Jul 31 21:22:03 2022 +0000
@@ -0,0 +1,146 @@
+time longitude latitude depth chl
+2002-12-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2003-01-15 00:00:00 -5.0000005 43.5 0.50576 0.41
+2003-02-15 00:00:00 -5.0000005 43.5 0.50576 0.55
+2003-03-15 00:00:00 -5.0000005 43.5 0.50576 1.0699999
+2003-04-15 00:00:00 -5.0000005 43.5 0.50576 0.89
+2003-05-15 00:00:00 -5.0000005 43.5 0.50576 0.14
+2003-06-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2003-07-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2003-08-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2003-09-15 00:00:00 -5.0000005 43.5 0.50576 0.04
+2003-10-15 00:00:00 -5.0000005 43.5 0.50576 0.08
+2003-11-15 00:00:00 -5.0000005 43.5 0.50576 0.39
+2003-12-15 00:00:00 -5.0000005 43.5 0.50576 0.31
+2004-01-15 00:00:00 -5.0000005 43.5 0.50576 0.38
+2004-02-15 00:00:00 -5.0000005 43.5 0.50576 0.57
+2004-03-15 00:00:00 -5.0000005 43.5 0.50576 1.05
+2004-04-15 00:00:00 -5.0000005 43.5 0.50576 1.43
+2004-05-15 00:00:00 -5.0000005 43.5 0.50576 1.27
+2004-06-15 00:00:00 -5.0000005 43.5 0.50576 0.81
+2004-07-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2004-08-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2004-09-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2004-10-15 00:00:00 -5.0000005 43.5 0.50576 0.19999999
+2004-11-15 00:00:00 -5.0000005 43.5 0.50576 0.41
+2004-12-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2005-01-15 00:00:00 -5.0000005 43.5 0.50576 0.42
+2005-02-15 00:00:00 -5.0000005 43.5 0.50576 0.59
+2005-03-15 00:00:00 -5.0000005 43.5 0.50576 1.37
+2005-04-15 00:00:00 -5.0000005 43.5 0.50576 1.4399999
+2005-05-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2005-06-15 00:00:00 -5.0000005 43.5 0.50576 0.22999999
+2005-07-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2005-08-15 00:00:00 -5.0000005 43.5 0.50576 0.75
+2005-09-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2005-10-15 00:00:00 -5.0000005 43.5 0.50576 0.22999999
+2005-11-15 00:00:00 -5.0000005 43.5 0.50576 0.5
+2005-12-15 00:00:00 -5.0000005 43.5 0.50576 0.42
+2006-01-15 00:00:00 -5.0000005 43.5 0.50576 0.51
+2006-02-15 00:00:00 -5.0000005 43.5 0.50576 0.81
+2006-03-15 00:00:00 -5.0000005 43.5 0.50576 1.78
+2006-04-15 00:00:00 -5.0000005 43.5 0.50576 1.87
+2006-05-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2006-06-15 00:00:00 -5.0000005 43.5 0.50576 0.87
+2006-07-15 00:00:00 -5.0000005 43.5 0.50576 0.04
+2006-08-15 00:00:00 -5.0000005 43.5 0.50576 0.04
+2006-09-15 00:00:00 -5.0000005 43.5 0.50576 0.03
+2006-10-15 00:00:00 -5.0000005 43.5 0.50576 0.22
+2006-11-15 00:00:00 -5.0000005 43.5 0.50576 0.51
+2006-12-15 00:00:00 -5.0000005 43.5 0.50576 0.41
+2007-01-15 00:00:00 -5.0000005 43.5 0.50576 0.39999998
+2007-02-15 00:00:00 -5.0000005 43.5 0.50576 0.61
+2007-03-15 00:00:00 -5.0000005 43.5 0.50576 1.24
+2007-04-15 00:00:00 -5.0000005 43.5 0.50576 1.09
+2007-05-15 00:00:00 -5.0000005 43.5 0.50576 0.28
+2007-06-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2007-07-15 00:00:00 -5.0000005 43.5 0.50576 0.01
+2007-08-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2007-09-15 00:00:00 -5.0000005 43.5 0.50576 1.6899999
+2007-10-15 00:00:00 -5.0000005 43.5 0.50576 0.71999997
+2007-11-15 00:00:00 -5.0000005 43.5 0.50576 1.25
+2007-12-15 00:00:00 -5.0000005 43.5 0.50576 0.68
+2008-01-15 00:00:00 -5.0000005 43.5 0.50576 0.57
+2008-02-15 00:00:00 -5.0000005 43.5 0.50576 0.95
+2008-03-15 00:00:00 -5.0000005 43.5 0.50576 1.1
+2008-04-15 00:00:00 -5.0000005 43.5 0.50576 1.35
+2008-05-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2008-06-15 00:00:00 -5.0000005 43.5 0.50576 0.01
+2008-07-15 00:00:00 -5.0000005 43.5 0.50576 0.04
+2008-08-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2008-09-15 00:00:00 -5.0000005 43.5 0.50576 0.31
+2008-10-15 00:00:00 -5.0000005 43.5 0.50576 0.17
+2008-11-15 00:00:00 -5.0000005 43.5 0.50576 0.21
+2008-12-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2009-01-15 00:00:00 -5.0000005 43.5 0.50576 0.42999998
+2009-02-15 00:00:00 -5.0000005 43.5 0.50576 0.55
+2009-03-15 00:00:00 -5.0000005 43.5 0.50576 1.0
+2009-04-15 00:00:00 -5.0000005 43.5 0.50576 0.71999997
+2009-05-15 00:00:00 -5.0000005 43.5 0.50576 0.14
+2009-06-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2009-07-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2009-08-15 00:00:00 -5.0000005 43.5 0.50576 0.01
+2009-09-15 00:00:00 -5.0000005 43.5 0.50576 0.29
+2009-10-15 00:00:00 -5.0000005 43.5 0.50576 0.90999997
+2009-11-15 00:00:00 -5.0000005 43.5 0.50576 0.45
+2009-12-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2010-01-15 00:00:00 -5.0000005 43.5 0.50576 0.42999998
+2010-02-15 00:00:00 -5.0000005 43.5 0.50576 0.56
+2010-03-15 00:00:00 -5.0000005 43.5 0.50576 1.35
+2010-04-15 00:00:00 -5.0000005 43.5 0.50576 1.63
+2010-05-15 00:00:00 -5.0000005 43.5 0.50576 0.41
+2010-06-15 00:00:00 -5.0000005 43.5 0.50576 0.099999994
+2010-07-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2010-08-15 00:00:00 -5.0000005 43.5 0.50576 0.03
+2010-09-15 00:00:00 -5.0000005 43.5 0.50576 0.14
+2010-10-15 00:00:00 -5.0000005 43.5 0.50576 0.099999994
+2010-11-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2010-12-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2011-01-15 00:00:00 -5.0000005 43.5 0.50576 0.55
+2011-02-15 00:00:00 -5.0000005 43.5 0.50576 0.96999997
+2011-03-15 00:00:00 -5.0000005 43.5 0.50576 1.65
+2011-04-15 00:00:00 -5.0000005 43.5 0.50576 1.16
+2011-05-15 00:00:00 -5.0000005 43.5 0.50576 0.32
+2011-06-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2011-07-15 00:00:00 -5.0000005 43.5 0.50576 0.089999996
+2011-08-15 00:00:00 -5.0000005 43.5 0.50576 0.03
+2011-09-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2011-10-15 00:00:00 -5.0000005 43.5 0.50576 0.25
+2011-11-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2011-12-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2012-01-15 00:00:00 -5.0000005 43.5 0.50576 0.45
+2012-02-15 00:00:00 -5.0000005 43.5 0.50576 0.68
+2012-03-15 00:00:00 -5.0000005 43.5 0.50576 1.81
+2012-04-15 00:00:00 -5.0000005 43.5 0.50576 1.75
+2012-05-15 00:00:00 -5.0000005 43.5 0.50576 1.03
+2012-06-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2012-07-15 00:00:00 -5.0000005 43.5 0.50576 0.01
+2012-08-15 00:00:00 -5.0000005 43.5 0.50576 0.01
+2012-09-15 00:00:00 -5.0000005 43.5 0.50576 0.099999994
+2012-10-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2012-11-15 00:00:00 -5.0000005 43.5 0.50576 0.14
+2012-12-15 00:00:00 -5.0000005 43.5 0.50576 0.34
+2013-01-15 00:00:00 -5.0000005 43.5 0.50576 0.5
+2013-02-15 00:00:00 -5.0000005 43.5 0.50576 1.09
+2013-03-15 00:00:00 -5.0000005 43.5 0.50576 1.62
+2013-04-15 00:00:00 -5.0000005 43.5 0.50576 1.4
+2013-05-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2013-06-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2013-07-15 00:00:00 -5.0000005 43.5 0.50576 0.48
+2013-08-15 00:00:00 -5.0000005 43.5 0.50576 0.08
+2013-09-15 00:00:00 -5.0000005 43.5 0.50576 0.21
+2013-10-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2013-11-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2013-12-15 00:00:00 -5.0000005 43.5 0.50576 0.59
+2014-01-15 00:00:00 -5.0000005 43.5 0.50576 0.56
+2014-02-15 00:00:00 -5.0000005 43.5 0.50576 0.90999997
+2014-03-15 00:00:00 -5.0000005 43.5 0.50576 1.3299999
+2014-04-15 00:00:00 -5.0000005 43.5 0.50576 1.09
+2014-05-15 00:00:00 -5.0000005 43.5 0.50576 0.37
+2014-06-15 00:00:00 -5.0000005 43.5 0.50576 0.11
+2014-07-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2014-08-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2014-09-15 00:00:00 -5.0000005 43.5 0.50576 0.11
+2014-10-15 00:00:00 -5.0000005 43.5 0.50576 0.02
+2014-11-15 00:00:00 -5.0000005 43.5 0.50576 0.07
+2014-12-15 00:00:00 -5.0000005 43.5 0.50576 0.17
diff -r 9bbaab36a5d4 -r 00de53d18b99 test-data/version.tabular
--- a/test-data/version.tabular Thu Jan 20 17:09:40 2022 +0000
+++ b/test-data/version.tabular Sun Jul 31 21:22:03 2022 +0000
@@ -1,1 +1,1 @@
-Galaxy xarray version 0.20.2
+Galaxy xarray version 2022.3.0
diff -r 9bbaab36a5d4 -r 00de53d18b99 timeseries.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/timeseries.py Sun Jul 31 21:22:03 2022 +0000
@@ -0,0 +1,182 @@
+#!/usr/bin/env python3
+#
+#
+# usage:  netCDF_timeseries.py [-h] [--output output.png]
+#                               [--save timeseries.tabular]
+#                               [--config config-file]
+#                               [-v]
+#                               input varname
+# positional arguments:
+#  input            input filename with geographical coordinates (netCDF
+#                   format)
+#  varname          Specify which variable to extract (case sensitive)
+#
+# optional arguments:
+#  -h, --help                 show this help message and exit
+#  --output output.png        filename to store image (png format)
+#  --save timeseries.tabular  filename to store timeseries (tabular format)
+#  --config                   config file extract parameters
+#  -v, --verbose              switch on verbose mode
+#
+import argparse
+import ast
+import warnings
+
+import cftime  # noqa: F401
+
+import matplotlib as mpl
+mpl.use('Agg')
+
+import matplotlib.pyplot as plt   # noqa: I202,E402
+from matplotlib.dates import DateFormatter   # noqa: I202,E402
+
+import xarray as xr  # noqa: I202,E402
+
+
+class TimeSeries ():
+    def __init__(self, input, varname, output, save, verbose=False,
+                 config_file=""):
+
+        li = list(input.split(","))
+        if len(li) > 1:
+            self.input = li
+        else:
+            self.input = input
+
+        self.varname = varname
+        self.xylim_supported = True
+        if output == "" or output is None:
+            self.output = "Timeseries.png"
+        else:
+            self.output = output
+        if save == "" or save is None:
+            self.save = "Timeseries.tabular"
+        else:
+            self.save = save
+        self.verbose = verbose
+        self.time_start_value = ""
+        self.time_end_value = ""
+        self.lon_value = ""
+        self.lat_value = ""
+        self.lat_name = 'lat'
+        self.lon_name = 'lon'
+        self.time_name = 'time'
+        self.title = ''
+        self.xlabel = ''
+        self.ylabel = ''
+        self.format_date = ''
+        if config_file != "" and config_file is not None:
+            with open(config_file) as f:
+                sdict = ''.join(
+                    f.read().replace("\n", "").split('{')[1].split('}')[0]
+                    )
+                tmp = ast.literal_eval('{' + sdict.strip() + '}')
+                for key in tmp:
+                    if key == 'time_start_value':
+                        self.time_start_value = tmp[key]
+                    if key == 'time_end_value':
+                        self.time_end_value = tmp[key]
+                    if key == 'lon_value':
+                        self.lon_value = tmp[key]
+                    if key == 'lat_value':
+                        self.lat_value = tmp[key]
+                    if key == 'lon_name':
+                        self.lon_name = tmp[key]
+                    if key == 'lat_name':
+                        self.lat_name = tmp[key]
+                    if key == 'time_name':
+                        self.time_name = tmp[key]
+                    if key == 'title':
+                        self.title = tmp[key]
+                    if key == 'xlabel':
+                        self.xlabel = tmp[key]
+                    if key == 'ylabel':
+                        self.ylabel = tmp[key]
+                    if key == 'format_date':
+                        self.format_date = tmp[key]
+                        self.format_date = self.format_date.replace('X', '%')
+
+        if type(self.input) is list:
+            self.dset = xr.open_mfdataset(self.input, use_cftime=True)
+        else:
+            self.dset = xr.open_dataset(self.input, use_cftime=True)
+
+        if verbose:
+            print("input: ", self.input)
+            print("varname: ", self.varname)
+            if self.time_start_value:
+                print("time_start_value: ", self.time_start_value)
+            if self.time_end_value:
+                print("time_end_value: ", self.time_end_value)
+            print("output: ", self.output)
+            if self.lon_value:
+                print(self.lon_name, self.lon_value)
+            if self.lat_value:
+                print(self.lat_name, self.lat_value)
+
+    def plot(self):
+        if self.lon_value:
+            lon_c = float(self.lon_value)
+        if self.lat_value:
+            lat_c = float(self.lat_value)
+        if self.lat_value and self.lon_value:
+            self.df = self.dset.sel({self.lat_name: lat_c,
+                                     self.lon_name: lon_c},
+                                    method='nearest')
+        else:
+            self.df = self.dset
+        if self.time_start_value or self.time_end_value:
+            self.df = self.df.sel({self.time_name: slice(self.time_start_value,
+                                                         self.time_end_value)})
+        # Saving the time series into a tabular
+        self.df = self.df[self.varname].squeeze().to_dataframe()
+        self.df.dropna().to_csv(self.save, sep='\t')
+        # Plot the time series into png image
+        fig = plt.figure(figsize=(15, 5))
+        ax = plt.subplot(111)
+        self.df[self.varname].plot(ax=ax)
+        if self.title:
+            plt.title(self.title)
+        if self.xlabel:
+            plt.xlabel(self.xlabel)
+        if self.ylabel:
+            plt.ylabel(self.ylabel)
+        if self.format_date:
+            ax.xaxis.set_major_formatter(DateFormatter(self.format_date))
+        fig.tight_layout()
+        fig.savefig(self.output)
+
+
+if __name__ == '__main__':
+    warnings.filterwarnings("ignore")
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        'input',
+        help='input filename with geographical coordinates (netCDF format)'
+    )
+    parser.add_argument(
+        'varname',
+        help='Specify which variable to plot (case sensitive)'
+    )
+    parser.add_argument(
+        '--output',
+        help='output filename to store resulting image (png format)'
+    )
+    parser.add_argument(
+        '--save',
+        help='save resulting tabular file (tabular format) into filename'
+    )
+    parser.add_argument(
+        '--config',
+        help='pass timeseries parameters via a config file'
+    )
+    parser.add_argument(
+        "-v", "--verbose",
+        help="switch on verbose mode",
+        action="store_true")
+    args = parser.parse_args()
+
+    dset = TimeSeries(input=args.input, varname=args.varname,
+                      output=args.output, save=args.save, verbose=args.verbose,
+                      config_file=args.config)
+    dset.plot()
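
For orientation, a minimal sketch of driving the new script programmatically, mirroring the __main__ block above; all file names here are placeholders, not files shipped with the tool:

# Equivalent command line:
#   python3 timeseries.py input.nc chl --output chl.png --save chl.tabular --config config.txt
from timeseries import TimeSeries

ts = TimeSeries(input='input.nc',       # 'a.nc,b.nc' would trigger open_mfdataset instead
                varname='chl',
                output='chl.png',
                save='chl.tabular',
                verbose=True,
                config_file='config.txt')
ts.plot()   # writes both the tabular file and the PNG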
diff -r 9bbaab36a5d4 -r 00de53d18b99 xarray_info.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/xarray_info.py Sun Jul 31 21:22:03 2022 +0000
@@ -0,0 +1,107 @@
+# xarray tool for:
+# - getting metadata information
+# - select data and save results in csv file for further post-processing
+
+import argparse
+import csv
+import os
+import warnings
+
+import xarray as xr
+
+
+class XarrayInfo ():
+    def __init__(self, infile, outfile_info="", outfile_summary="",
+                 verbose=False, coords_info=None):
+        self.infile = infile
+        self.outfile_info = outfile_info
+        self.outfile_summary = outfile_summary
+        self.coords_info = coords_info
+        self.verbose = verbose
+        # initialization
+        self.dset = None
+        self.gset = None
+        if self.verbose:
+            print("infile: ", self.infile)
+            print("outfile_info: ", self.outfile_info)
+            print("outfile_summary: ", self.outfile_summary)
+            print("coords_info: ", self.coords_info)
+
+    def info(self):
+        f = open(self.outfile_info, 'w')
+        ds = xr.open_dataset(self.infile)
+        ds.info(f)
+        f.close()
+
+    def summary(self):
+        f = open(self.outfile_summary, 'w')
+        ds = xr.open_dataset(self.infile)
+        writer = csv.writer(f, delimiter='\t')
+        header = ['VariableName', 'NumberOfDimensions']
+        for idx, val in enumerate(ds.dims.items()):
+            header.append('Dim' + str(idx) + 'Name')
+            header.append('Dim' + str(idx) + 'Size')
+        writer.writerow(header)
+        for name, da in ds.data_vars.items():
+            line = [name]
+            line.append(len(ds[name].shape))
+            for d, s in zip(da.shape, da.sizes):
+                line.append(s)
+                line.append(d)
+            writer.writerow(line)
+        for name, da in ds.coords.items():
+            line = [name]
+            line.append(len(ds[name].shape))
+            for d, s in zip(da.shape, da.sizes):
+                line.append(s)
+                line.append(d)
+            writer.writerow(line)
+        f.close()
+
+    def get_coords_info(self):
+        ds = xr.open_dataset(self.infile)
+        for c in ds.coords:
+            filename = os.path.join(self.coords_info,
+                                    c.strip() +
+                                    '.tabular')
+            pd = ds.coords[c].to_pandas()
+            pd.index = range(len(pd))
+            pd.to_csv(filename, header=False, sep='\t')
+
+
+if __name__ == '__main__':
+    warnings.filterwarnings("ignore")
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        'infile',
+        help='netCDF input filename'
+    )
+    parser.add_argument(
+        '--info',
+        help='Output filename where metadata information is stored'
+    )
+    parser.add_argument(
+        '--summary',
+        help='Output filename where data summary information is stored'
+    )
+    parser.add_argument(
+        '--coords_info',
+        help='output-folder where for each coordinate, coordinate values '
+             ' are being printed in the corresponding outputfile'
+    )
+    parser.add_argument(
+        "-v", "--verbose",
+        help="switch on verbose mode",
+        action="store_true"
+    )
+    args = parser.parse_args()
+
+    p = XarrayInfo(args.infile, args.info, args.summary,
+                   args.verbose, args.coords_info)
+    if args.info:
+        p.info()
+    elif args.coords_info:
+        p.get_coords_info()
+    if args.summary:
+        p.summary()
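
A minimal sketch of the extracted class used directly, equivalent to the --info/--summary/--coords_info code paths above; file and folder names are placeholders:

from xarray_info import XarrayInfo

p = XarrayInfo('input.nc',
               outfile_info='info.txt',            # plain ds.info() dump
               outfile_summary='summary.tabular',  # VariableName, NumberOfDimensions, DimNName, DimNSize columns
               coords_info='coords_dir')           # one <coordinate>.tabular written per coordinate
p.info()
p.summary()
p.get_coords_info()   # 'coords_dir' must already exist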
diff -r 9bbaab36a5d4 -r 00de53d18b99 xarray_metadata_info.xml
--- a/xarray_metadata_info.xml Thu Jan 20 17:09:40 2022 +0000
+++ b/xarray_metadata_info.xml Sun Jul 31 21:22:03 2022 +0000
@@ -6,13 +6,12 @@
     <expand macro="edam_ontology"/>
     <requirements>
         <requirement type="package" version="@TOOL_VERSION@">xarray</requirement>
-        <requirement type="package" version="3">python</requirement>
-        <requirement type="package" version="1.5.6">netcdf4</requirement>
-        <requirement type="package" version="0.9.0">geopandas</requirement>
-        <requirement type="package" version="1.7.1">shapely</requirement>
+        <requirement type="package" version="3.10">python</requirement>
+        <requirement type="package" version="1.6.0">netcdf4</requirement>
+        <requirement type="package" version="1.4.3">pandas</requirement>
     </requirements>
     <command detect_errors="exit_code"><![CDATA[
-        python3 '$__tool_directory__/xarray_tool.py' '$input' --info '$info' --summary '$output'
+        python3 '$__tool_directory__/xarray_info.py' '$input' --info '$info' --summary '$output'
     ]]>    </command>
     <inputs>
         <param type="data" name="input" label="Netcdf file" format="netcdf,h5" help="Netcdf file you need information about."/>
diff -r 9bbaab36a5d4 -r 00de53d18b99 xarray_select.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/xarray_select.py Sun Jul 31 21:22:03 2022 +0000
@@ -0,0 +1,294 @@
[294-line file added with CRLF line endings; the hunk body is truncated in this listing. Recoverable content: xarray_select.py is described as an "xarray tool for getting metadata information / select data and save results in csv file for further post-processing". It imports argparse, os, warnings, geopandas, pandas, shapely (Point, nearest_points) and xarray, and defines class XarraySelect(infile, select, outfile, outputdir, latname, latvalN, latvalS, lonname, lonvalE, lonvalW, filter_list, coords, time, verbose, no_missing, tolerance) with rowfilter() (operators bi/le/ge/e applied either to the selected variable or to one of its dimensions), selection(), a nearest-non-null-point lookup built on a geopandas GeoDataFrame and shapely nearest_points, and selection_from_coords(), which loops over a tabular file of coordinates. The __main__ block exposes --select, --latname, --latvalN, --latvalS, --lonname, --lonvalE, --lonvalW, --tolerance, --coords, --filter, --time, --outfile, --outputdir, -v/--verbose and --no_missing, and calls selection() when --select is given.]
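
Based on the recoverable part of the hunk, a hedged sketch of a single-location selection with the new class; the input file and values are illustrative only:

# Roughly equivalent command line:
#   python3 xarray_select.py input.nc --select chl --latname latitude --latvalN 44.75 \
#       --lonname longitude --lonvalE -6.0 --outfile chl_selection.tabular
from xarray_select import XarraySelect

p = XarraySelect('input.nc', select='chl',
                 outfile='chl_selection.tabular',
                 latname='latitude', latvalN='44.75',
                 lonname='longitude', lonvalE='-6.0')
p.selection()   # writes the selection result to the tabular outfile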
diff -r 9bbaab36a5d4 -r 00de53d18b99 xarray_tool.py
--- a/xarray_tool.py Thu Jan 20 17:09:40 2022 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
b'@@ -1,365 +0,0 @@\n-# xarray tool for:\r\n-# - getting metadata information\r\n-# - select data and save results in csv file for further post-processing\r\n-\r\n-import argparse\r\n-import csv\r\n-import os\r\n-import warnings\r\n-\r\n-import geopandas as gdp\r\n-\r\n-import pandas as pd\r\n-\r\n-from shapely.geometry import Point\r\n-from shapely.ops import nearest_points\r\n-\r\n-import xarray as xr\r\n-\r\n-\r\n-class XarrayTool ():\r\n-    def __init__(self, infile, outfile_info="", outfile_summary="",\r\n-                 select="", outfile="", outputdir="", latname="",\r\n-                 latvalN="", latvalS="", lonname="", lonvalE="",\r\n-                 lonvalW="", filter_list="", coords="", time="",\r\n-                 verbose=False, no_missing=False, coords_info=None,\r\n-                 tolerance=None):\r\n-        self.infile = infile\r\n-        self.outfile_info = outfile_info\r\n-        self.outfile_summary = outfile_summary\r\n-        self.select = select\r\n-        self.outfile = outfile\r\n-        self.outputdir = outputdir\r\n-        self.latname = latname\r\n-        if tolerance != "" and tolerance is not None:\r\n-            self.tolerance = float(tolerance)\r\n-        else:\r\n-            self.tolerance = -1\r\n-        if latvalN != "" and latvalN is not None:\r\n-            self.latvalN = float(latvalN)\r\n-        else:\r\n-            self.latvalN = ""\r\n-        if latvalS != "" and latvalS is not None:\r\n-            self.latvalS = float(latvalS)\r\n-        else:\r\n-            self.latvalS = ""\r\n-        self.lonname = lonname\r\n-        if lonvalE != "" and lonvalE is not None:\r\n-            self.lonvalE = float(lonvalE)\r\n-        else:\r\n-            self.lonvalE = ""\r\n-        if lonvalW != "" and lonvalW is not None:\r\n-            self.lonvalW = float(lonvalW)\r\n-        else:\r\n-            self.lonvalW = ""\r\n-        self.filter = filter_list\r\n-        self.time = time\r\n-        self.coords = coords\r\n-        self.verbose = verbose\r\n-        self.no_missing = no_missing\r\n-        # initialization\r\n-        self.dset = None\r\n-        self.gset = None\r\n-        self.coords_info = coords_info\r\n-        if self.verbose:\r\n-            print("infile: ", self.infile)\r\n-            print("outfile_info: ", self.outfile_info)\r\n-            print("outfile_summary: ", self.outfile_summary)\r\n-            print("outfile: ", self.outfile)\r\n-            print("select: ", self.select)\r\n-            print("outfile: ", self.outfile)\r\n-            print("outputdir: ", self.outputdir)\r\n-            print("latname: ", self.latname)\r\n-            print("latvalN: ", self.latvalN)\r\n-            print("latvalS: ", self.latvalS)\r\n-            print("lonname: ", self.lonname)\r\n-            print("lonvalE: ", self.lonvalE)\r\n-            print("lonvalW: ", self.lonvalW)\r\n-            print("filter: ", self.filter)\r\n-            print("time: ", self.time)\r\n-            print("coords: ", self.coords)\r\n-            print("coords_info: ", self.coords_info)\r\n-\r\n-    def info(self):\r\n-        f = open(self.outfile_info, \'w\')\r\n-        ds = xr.open_dataset(self.infile)\r\n-        ds.info(f)\r\n-        f.close()\r\n-\r\n-    def summary(self):\r\n-        f = open(self.outfile_summary, \'w\')\r\n-        ds = xr.open_dataset(self.infile)\r\n-        writer = csv.writer(f, delimiter=\'\\t\')\r\n-        header = [\'VariableName\', \'NumberOfDimensions\']\r\n-        for idx, val in 
enumerate(ds.dims.items()):\r\n-            header.append(\'Dim\' + str(idx) + \'Name\')\r\n-            header.append(\'Dim\' + str(idx) + \'Size\')\r\n-        writer.writerow(header)\r\n-        for name, da in ds.data_vars.items():\r\n-            line = [name]\r\n-            line.append(len(ds[name].shape))\r\n-            for d, s in zip(da.shape, da.sizes):\r\n-                line.append(s)\r\n-                line.append(d)\r\n-            writer.writerow(line)\r\n-        for name, da in ds.coords.items():\r\n-            line = [name]\r\n-            line.append(len(ds[name].shape))\r\n-            for d, s in zip(da.shape, da.sizes):\r\n-                line.append(s)\r\n-    '..b'       self.lonvalE = row[1]\r\n-            self.outfile = (os.path.join(self.outputdir,\r\n-                            self.select + \'_\' +\r\n-                            str(row.Index) + \'.tabular\'))\r\n-            self.selection()\r\n-\r\n-    def get_coords_info(self):\r\n-        ds = xr.open_dataset(self.infile)\r\n-        for c in ds.coords:\r\n-            filename = os.path.join(self.coords_info,\r\n-                                    c.strip() +\r\n-                                    \'.tabular\')\r\n-            pd = ds.coords[c].to_pandas()\r\n-            pd.index = range(len(pd))\r\n-            pd.to_csv(filename, header=False, sep=\'\\t\')\r\n-\r\n-\r\n-if __name__ == \'__main__\':\r\n-    warnings.filterwarnings("ignore")\r\n-    parser = argparse.ArgumentParser()\r\n-\r\n-    parser.add_argument(\r\n-        \'infile\',\r\n-        help=\'netCDF input filename\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--info\',\r\n-        help=\'Output filename where metadata information is stored\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--summary\',\r\n-        help=\'Output filename where data summary information is stored\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--select\',\r\n-        help=\'Variable name to select\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--latname\',\r\n-        help=\'Latitude name\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--latvalN\',\r\n-        help=\'North latitude value\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--latvalS\',\r\n-        help=\'South latitude value\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--lonname\',\r\n-        help=\'Longitude name\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--lonvalE\',\r\n-        help=\'East longitude value\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--lonvalW\',\r\n-        help=\'West longitude value\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--tolerance\',\r\n-        help=\'Maximum distance between original and selected value for \'\r\n-             \' inexact matches e.g. 
abs(index[indexer] - target) <= tolerance\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--coords\',\r\n-        help=\'Input file containing Latitude and Longitude\'\r\n-             \'for geographical selection\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--coords_info\',\r\n-        help=\'output-folder where for each coordinate, coordinate values \'\r\n-             \' are being printed in the corresponding outputfile\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--filter\',\r\n-        nargs="*",\r\n-        help=\'Filter list variable#operator#value_s#value_e\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--time\',\r\n-        help=\'select timeseries variable#operator#value_s[#value_e]\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--outfile\',\r\n-        help=\'csv outfile for storing results of the selection\'\r\n-             \'(valid only when --select)\'\r\n-    )\r\n-    parser.add_argument(\r\n-        \'--outputdir\',\r\n-        help=\'folder name for storing results with multiple selections\'\r\n-             \'(valid only when --select)\'\r\n-    )\r\n-    parser.add_argument(\r\n-        "-v", "--verbose",\r\n-        help="switch on verbose mode",\r\n-        action="store_true"\r\n-    )\r\n-    parser.add_argument(\r\n-        "--no_missing",\r\n-        help="""Do not take into account possible null/missing values\r\n-                (only valid for single location)""",\r\n-        action="store_true"\r\n-    )\r\n-    args = parser.parse_args()\r\n-\r\n-    p = XarrayTool(args.infile, args.info, args.summary, args.select,\r\n-                   args.outfile, args.outputdir, args.latname,\r\n-                   args.latvalN, args.latvalS, args.lonname,\r\n-                   args.lonvalE, args.lonvalW, args.filter,\r\n-                   args.coords, args.time, args.verbose,\r\n-                   args.no_missing, args.coords_info, args.tolerance)\r\n-    if args.info:\r\n-        p.info()\r\n-    if args.summary:\r\n-        p.summary()\r\n-    if args.coords:\r\n-        p.selection_from_coords()\r\n-    elif args.select:\r\n-        p.selection()\r\n-    elif args.coords_info:\r\n-        p.get_coords_info()\r\n'