# HG changeset patch
# User ecology
# Date 1622969443 0
# Node ID fea8a53f809913d6639a462aa2c1d8462fbaa9c5
"planemo upload for repository https://github.com/galaxyecology/tools-ecology/tree/master/tools/data_manipulation/xarray/ commit 57b6d23e3734d883e71081c78e77964d61be82ba"
diff -r 000000000000 -r fea8a53f8099 README.md
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/README.md Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,8 @@
+# Xarray tools for netCDF
+## netCDF metadata information
+
+The first tool `xarray_metadata_info` uses xarray to provide users with general information about variable names, dimensions
+and attributes.
+Variables that can be extracted and dimensions available are printed in a tabular file.
+
+The tool also prints a general information file. It is the result of the xarray method info().
diff -r 000000000000 -r fea8a53f8099 macros.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/macros.xml Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,185 @@
+
+ 0.18.2
+ 0
+
+
+ topic_0610
+ topic_3050
+
+
+
+
+
+ @article{hoyer2017xarray,
+ title = {xarray: {N-D} labeled arrays and datasets in {Python}},
+ author = {Hoyer, S. and J. Hamman},
+ journal = {Journal of Open Research Software},
+ volume = {5},
+ number = {1},
+ year = {2017},
+ publisher = {Ubiquity Press},
+ doi = {10.5334/jors.148},
+ url = {http://doi.org/10.5334/jors.148}
+ }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff -r 000000000000 -r fea8a53f8099 test-data/Metadata_infos_from_dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc.Variables.tab
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/Metadata_infos_from_dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc.Variables.tab Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,8 @@
+VariableName NumberOfDimensions Dim0Name Dim0Size Dim1Name Dim1Size Dim2Name Dim2Size Dim3Name Dim3Size
+phy 4 time 145 depth 1 latitude 97 longitude 103
+chl 4 time 145 depth 1 latitude 97 longitude 103
+nh4 4 time 145 depth 1 latitude 97 longitude 103
+time 1 time 145
+longitude 1 longitude 103
+latitude 1 latitude 97
+depth 1 depth 1
diff -r 000000000000 -r fea8a53f8099 test-data/Test1.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/Test1.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,146 @@
+ time depth longitude latitude phy
+0 2002-12-15 0.5057600140571594 -2.0000007 44.0 1.0500183
+1 2003-01-15 0.5057600140571594 -2.0000007 44.0 1.25
+2 2003-02-15 0.5057600140571594 -2.0000007 44.0 1.3000183
+3 2003-03-15 0.5057600140571594 -2.0000007 44.0 6.0599976
+4 2003-04-15 0.5057600140571594 -2.0000007 44.0 2.25
+5 2003-05-15 0.5057600140571594 -2.0000007 44.0 0.6499939
+6 2003-06-15 0.5057600140571594 -2.0000007 44.0 0.42999268
+7 2003-07-15 0.5057600140571594 -2.0000007 44.0 0.42999268
+8 2003-08-15 0.5057600140571594 -2.0000007 44.0 0.480011
+9 2003-09-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+10 2003-10-15 0.5057600140571594 -2.0000007 44.0 0.5
+11 2003-11-15 0.5057600140571594 -2.0000007 44.0 0.9299927
+12 2003-12-15 0.5057600140571594 -2.0000007 44.0 1.3900146
+13 2004-01-15 0.5057600140571594 -2.0000007 44.0 1.7400208
+14 2004-02-15 0.5057600140571594 -2.0000007 44.0 4.5
+15 2004-03-15 0.5057600140571594 -2.0000007 44.0 5.5500183
+16 2004-04-15 0.5057600140571594 -2.0000007 44.0 5.3099976
+17 2004-05-15 0.5057600140571594 -2.0000007 44.0 3.75
+18 2004-06-15 0.5057600140571594 -2.0000007 44.0 0.77001953
+19 2004-07-15 0.5057600140571594 -2.0000007 44.0 0.5
+20 2004-08-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+21 2004-09-15 0.5057600140571594 -2.0000007 44.0 0.4500122
+22 2004-10-15 0.5057600140571594 -2.0000007 44.0 0.480011
+23 2004-11-15 0.5057600140571594 -2.0000007 44.0 0.83999634
+24 2004-12-15 0.5057600140571594 -2.0000007 44.0 1.7400208
+25 2005-01-15 0.5057600140571594 -2.0000007 44.0 1.7700195
+26 2005-02-15 0.5057600140571594 -2.0000007 44.0 1.5500183
+27 2005-03-15 0.5057600140571594 -2.0000007 44.0 7.149994
+28 2005-04-15 0.5057600140571594 -2.0000007 44.0 3.649994
+29 2005-05-15 0.5057600140571594 -2.0000007 44.0 2.5200195
+30 2005-06-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+31 2005-07-15 0.5057600140571594 -2.0000007 44.0 0.6700134
+32 2005-08-15 0.5057600140571594 -2.0000007 44.0 0.4500122
+33 2005-09-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+34 2005-10-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+35 2005-11-15 0.5057600140571594 -2.0000007 44.0 0.6199951
+36 2005-12-15 0.5057600140571594 -2.0000007 44.0 1.1499939
+37 2006-01-15 0.5057600140571594 -2.0000007 44.0 3.5299988
+38 2006-02-15 0.5057600140571594 -2.0000007 44.0 7.1799927
+39 2006-03-15 0.5057600140571594 -2.0000007 44.0 6.5599976
+40 2006-04-15 0.5057600140571594 -2.0000007 44.0 3.8000183
+41 2006-05-15 0.5057600140571594 -2.0000007 44.0 0.95999146
+42 2006-06-15 0.5057600140571594 -2.0000007 44.0 1.5
+43 2006-07-15 0.5057600140571594 -2.0000007 44.0 1.0299988
+44 2006-08-15 0.5057600140571594 -2.0000007 44.0 0.480011
+45 2006-09-15 0.5057600140571594 -2.0000007 44.0 0.49002075
+46 2006-10-15 0.5057600140571594 -2.0000007 44.0 0.480011
+47 2006-11-15 0.5057600140571594 -2.0000007 44.0 0.9299927
+48 2006-12-15 0.5057600140571594 -2.0000007 44.0 1.2099915
+49 2007-01-15 0.5057600140571594 -2.0000007 44.0 1.1499939
+50 2007-02-15 0.5057600140571594 -2.0000007 44.0 1.7000122
+51 2007-03-15 0.5057600140571594 -2.0000007 44.0 5.230011
+52 2007-04-15 0.5057600140571594 -2.0000007 44.0 3.8600159
+53 2007-05-15 0.5057600140571594 -2.0000007 44.0 0.83999634
+54 2007-06-15 0.5057600140571594 -2.0000007 44.0 0.6799927
+55 2007-07-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+56 2007-08-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+57 2007-09-15 0.5057600140571594 -2.0000007 44.0 0.5
+58 2007-10-15 0.5057600140571594 -2.0000007 44.0 0.89001465
+59 2007-11-15 0.5057600140571594 -2.0000007 44.0 2.0299988
+60 2007-12-15 0.5057600140571594 -2.0000007 44.0 1.8399963
+61 2008-01-15 0.5057600140571594 -2.0000007 44.0 1.3399963
+62 2008-02-15 0.5057600140571594 -2.0000007 44.0 3.149994
+63 2008-03-15 0.5057600140571594 -2.0000007 44.0 4.5899963
+64 2008-04-15 0.5057600140571594 -2.0000007 44.0 5.080017
+65 2008-05-15 0.5057600140571594 -2.0000007 44.0 1.0
+66 2008-06-15 0.5057600140571594 -2.0000007 44.0 1.5299988
+67 2008-07-15 0.5057600140571594 -2.0000007 44.0 0.55999756
+68 2008-08-15 0.5057600140571594 -2.0000007 44.0 0.42999268
+69 2008-09-15 0.5057600140571594 -2.0000007 44.0 0.42999268
+70 2008-10-15 0.5057600140571594 -2.0000007 44.0 0.42999268
+71 2008-11-15 0.5057600140571594 -2.0000007 44.0 0.64001465
+72 2008-12-15 0.5057600140571594 -2.0000007 44.0 2.4200134
+73 2009-01-15 0.5057600140571594 -2.0000007 44.0 2.3900146
+74 2009-02-15 0.5057600140571594 -2.0000007 44.0 6.2099915
+75 2009-03-15 0.5057600140571594 -2.0000007 44.0 4.6799927
+76 2009-04-15 0.5057600140571594 -2.0000007 44.0 1.1100159
+77 2009-05-15 0.5057600140571594 -2.0000007 44.0 2.649994
+78 2009-06-15 0.5057600140571594 -2.0000007 44.0 1.4900208
+79 2009-07-15 0.5057600140571594 -2.0000007 44.0 0.5
+80 2009-08-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+81 2009-09-15 0.5057600140571594 -2.0000007 44.0 0.5800171
+82 2009-10-15 0.5057600140571594 -2.0000007 44.0 0.6499939
+83 2009-11-15 0.5057600140571594 -2.0000007 44.0 0.8999939
+84 2009-12-15 0.5057600140571594 -2.0000007 44.0 1.3099976
+85 2010-01-15 0.5057600140571594 -2.0000007 44.0 1.5299988
+86 2010-02-15 0.5057600140571594 -2.0000007 44.0 2.9599915
+87 2010-03-15 0.5057600140571594 -2.0000007 44.0 5.450012
+88 2010-04-15 0.5057600140571594 -2.0000007 44.0 7.5899963
+89 2010-05-15 0.5057600140571594 -2.0000007 44.0 1.8000183
+90 2010-06-15 0.5057600140571594 -2.0000007 44.0 0.480011
+91 2010-07-15 0.5057600140571594 -2.0000007 44.0 0.5
+92 2010-08-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+93 2010-09-15 0.5057600140571594 -2.0000007 44.0 0.49002075
+94 2010-10-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+95 2010-11-15 0.5057600140571594 -2.0000007 44.0 0.9299927
+96 2010-12-15 0.5057600140571594 -2.0000007 44.0 1.1499939
+97 2011-01-15 0.5057600140571594 -2.0000007 44.0 2.4900208
+98 2011-02-15 0.5057600140571594 -2.0000007 44.0 5.1799927
+99 2011-03-15 0.5057600140571594 -2.0000007 44.0 7.029999
+100 2011-04-15 0.5057600140571594 -2.0000007 44.0 2.4900208
+101 2011-05-15 0.5057600140571594 -2.0000007 44.0 0.6499939
+102 2011-06-15 0.5057600140571594 -2.0000007 44.0 0.52001953
+103 2011-07-15 0.5057600140571594 -2.0000007 44.0 0.5
+104 2011-08-15 0.5057600140571594 -2.0000007 44.0 0.75
+105 2011-09-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+106 2011-10-15 0.5057600140571594 -2.0000007 44.0 0.480011
+107 2011-11-15 0.5057600140571594 -2.0000007 44.0 0.730011
+108 2011-12-15 0.5057600140571594 -2.0000007 44.0 1.0299988
+109 2012-01-15 0.5057600140571594 -2.0000007 44.0 3.149994
+110 2012-02-15 0.5057600140571594 -2.0000007 44.0 2.3099976
+111 2012-03-15 0.5057600140571594 -2.0000007 44.0 5.5200195
+112 2012-04-15 0.5057600140571594 -2.0000007 44.0 3.399994
+113 2012-05-15 0.5057600140571594 -2.0000007 44.0 3.7000122
+114 2012-06-15 0.5057600140571594 -2.0000007 44.0 2.5899963
+115 2012-07-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+116 2012-08-15 0.5057600140571594 -2.0000007 44.0 0.4500122
+117 2012-09-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+118 2012-10-15 0.5057600140571594 -2.0000007 44.0 0.61001587
+119 2012-11-15 0.5057600140571594 -2.0000007 44.0 2.0299988
+120 2012-12-15 0.5057600140571594 -2.0000007 44.0 1.4200134
+121 2013-01-15 0.5057600140571594 -2.0000007 44.0 2.2700195
+122 2013-02-15 0.5057600140571594 -2.0000007 44.0 7.0
+123 2013-03-15 0.5057600140571594 -2.0000007 44.0 10.550018
+124 2013-04-15 0.5057600140571594 -2.0000007 44.0 5.8399963
+125 2013-05-15 0.5057600140571594 -2.0000007 44.0 1.2400208
+126 2013-06-15 0.5057600140571594 -2.0000007 44.0 4.1700134
+127 2013-07-15 0.5057600140571594 -2.0000007 44.0 3.2099915
+128 2013-08-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+129 2013-09-15 0.5057600140571594 -2.0000007 44.0 0.480011
+130 2013-10-15 0.5057600140571594 -2.0000007 44.0 0.49002075
+131 2013-11-15 0.5057600140571594 -2.0000007 44.0 0.7799988
+132 2013-12-15 0.5057600140571594 -2.0000007 44.0 1.4500122
+133 2014-01-15 0.5057600140571594 -2.0000007 44.0 0.95999146
+134 2014-02-15 0.5057600140571594 -2.0000007 44.0 1.3900146
+135 2014-03-15 0.5057600140571594 -2.0000007 44.0 5.779999
+136 2014-04-15 0.5057600140571594 -2.0000007 44.0 5.4299927
+137 2014-05-15 0.5057600140571594 -2.0000007 44.0 1.1799927
+138 2014-06-15 0.5057600140571594 -2.0000007 44.0 0.730011
+139 2014-07-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+140 2014-08-15 0.5057600140571594 -2.0000007 44.0 0.45999146
+141 2014-09-15 0.5057600140571594 -2.0000007 44.0 0.5
+142 2014-10-15 0.5057600140571594 -2.0000007 44.0 0.6199951
+143 2014-11-15 0.5057600140571594 -2.0000007 44.0 0.480011
+144 2014-12-15 0.5057600140571594 -2.0000007 44.0 0.55999756
diff -r 000000000000 -r fea8a53f8099 test-data/Test2.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/Test2.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,25 @@
+ time depth latitude longitude nh4
+0 2003-12-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 81.27
+1 2003-12-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 78.08
+2 2003-12-15 0.5057600140571594 45.5 -0.9166674017906189 55.149998
+3 2004-01-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 65.2
+4 2004-01-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 64.11
+5 2004-02-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 51.0
+6 2004-02-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 51.32
+7 2004-05-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 54.53
+8 2004-06-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 79.79
+9 2004-06-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 61.52
+10 2004-07-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 99.159996
+11 2004-07-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 77.93
+12 2004-08-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 110.149994
+13 2004-08-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 86.759995
+14 2004-09-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 112.369995
+15 2004-09-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 91.979996
+16 2004-10-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 109.63
+17 2004-10-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 95.509995
+18 2004-11-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 98.45
+19 2004-11-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 93.11
+20 2004-11-15 0.5057600140571594 45.5 -0.9166674017906189 56.78
+21 2004-12-15 0.5057600140571594 45.166664123535156 -0.6666674017906189 84.25
+22 2004-12-15 0.5057600140571594 45.416664123535156 -0.8333340883255005 81.83
+23 2004-12-15 0.5057600140571594 45.5 -0.9166674017906189 57.07
diff -r 000000000000 -r fea8a53f8099 test-data/all.netcdf
Binary file test-data/all.netcdf has changed
diff -r 000000000000 -r fea8a53f8099 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc has changed
diff -r 000000000000 -r fea8a53f8099 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time0.png has changed
diff -r 000000000000 -r fea8a53f8099 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time1.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time1.png has changed
diff -r 000000000000 -r fea8a53f8099 test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time50.png
Binary file test-data/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133_time50.png has changed
diff -r 000000000000 -r fea8a53f8099 test-data/depth.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/depth.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,1 @@
+0 0.50576
diff -r 000000000000 -r fea8a53f8099 test-data/info_file.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/info_file.txt Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,74 @@
+xarray.Dataset {
+dimensions:
+ depth = 1 ;
+ latitude = 97 ;
+ longitude = 103 ;
+ time = 145 ;
+
+variables:
+ float32 phy(time, depth, latitude, longitude) ;
+ phy:_CoordinateAxes = time depth latitude longitude ;
+ phy:long_name = Mole Concentration of Phytoplankton expressed as carbon in sea water ;
+ phy:standard_name = mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water ;
+ phy:units = mmol.m-3 ;
+ phy:unit_long = mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water ;
+ datetime64[ns] time(time) ;
+ time:standard_name = time ;
+ time:long_name = time ;
+ time:_CoordinateAxisType = Time ;
+ time:axis = T ;
+ float32 chl(time, depth, latitude, longitude) ;
+ chl:_CoordinateAxes = time depth latitude longitude ;
+ chl:long_name = Mass Concentration of Chlorophyll in Sea Water ;
+ chl:standard_name = mass_concentration_of_chlorophyll_in_sea_water ;
+ chl:units = mg.m-3 ;
+ chl:unit_long = milligram of chlorophyll per cubic meter ;
+ float32 nh4(time, depth, latitude, longitude) ;
+ nh4:_CoordinateAxes = time depth latitude longitude ;
+ nh4:long_name = Mole Concentration of Ammonium in Sea Water ;
+ nh4:standard_name = mole_concentration_of_ammonium_in_sea_water ;
+ nh4:units = mmol.m-3 ;
+ nh4:unit_long = millimoles of Ammonium per cubic meter ;
+ float32 longitude(longitude) ;
+ longitude:long_name = Longitude ;
+ longitude:units = degrees_east ;
+ longitude:standard_name = longitude ;
+ longitude:axis = X ;
+ longitude:unit_long = Degrees East ;
+ longitude:step = 0.08333f ;
+ longitude:_CoordinateAxisType = Lon ;
+ float32 latitude(latitude) ;
+ latitude:long_name = Latitude ;
+ latitude:units = degrees_north ;
+ latitude:standard_name = latitude ;
+ latitude:axis = Y ;
+ latitude:unit_long = Degrees North ;
+ latitude:step = 0.08333f ;
+ latitude:_CoordinateAxisType = Lat ;
+ float32 depth(depth) ;
+ depth:long_name = Depth ;
+ depth:units = m ;
+ depth:axis = Z ;
+ depth:positive = down ;
+ depth:unit_long = Meters ;
+ depth:standard_name = depth ;
+ depth:_CoordinateAxisType = Height ;
+ depth:_CoordinateZisPositive = down ;
+
+// global attributes:
+ :title = CMEMS IBI REANALYSIS: MONTHLY BIOGEOCHEMICAL PRODUCTS (REGULAR GRID) ;
+ :institution = Puertos del Estado (PdE) - Mercator-Ocean (MO) ;
+ :references = http://marine.copernicus.eu ;
+ :source = CMEMS IBI-MFC ;
+ :Conventions = CF-1.0 ;
+ :history = Data extracted from dataset http://puertos2.cesga.es:8080/thredds/dodsC/dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid ;
+ :time_min = 7272.0 ;
+ :time_max = 112464.0 ;
+ :julian_day_unit = Hours since 2002-02-15 ;
+ :z_min = 0.5057600140571594 ;
+ :z_max = 0.5057600140571594 ;
+ :latitude_min = 43.0 ;
+ :latitude_max = 51.0 ;
+ :longitude_min = -6.000000476837158 ;
+ :longitude_max = 2.4999990463256836 ;
+}
\ No newline at end of file
diff -r 000000000000 -r fea8a53f8099 test-data/latitude.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/latitude.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,97 @@
+0 43.0
+1 43.083332
+2 43.166664
+3 43.25
+4 43.333332
+5 43.416664
+6 43.5
+7 43.583332
+8 43.666664
+9 43.75
+10 43.833332
+11 43.916664
+12 44.0
+13 44.083332
+14 44.166664
+15 44.25
+16 44.333332
+17 44.416664
+18 44.5
+19 44.583332
+20 44.666664
+21 44.75
+22 44.833332
+23 44.916664
+24 45.0
+25 45.083332
+26 45.166664
+27 45.25
+28 45.333332
+29 45.416664
+30 45.5
+31 45.583332
+32 45.666664
+33 45.75
+34 45.833332
+35 45.916664
+36 46.0
+37 46.083332
+38 46.166664
+39 46.25
+40 46.333332
+41 46.416664
+42 46.5
+43 46.583332
+44 46.666664
+45 46.75
+46 46.833332
+47 46.916664
+48 47.0
+49 47.083332
+50 47.166664
+51 47.25
+52 47.333332
+53 47.416664
+54 47.5
+55 47.583332
+56 47.666664
+57 47.75
+58 47.833332
+59 47.916664
+60 48.0
+61 48.083332
+62 48.166664
+63 48.25
+64 48.333332
+65 48.416664
+66 48.5
+67 48.583332
+68 48.666664
+69 48.75
+70 48.833332
+71 48.916664
+72 49.0
+73 49.083332
+74 49.166664
+75 49.25
+76 49.333332
+77 49.416664
+78 49.5
+79 49.583332
+80 49.666664
+81 49.75
+82 49.833332
+83 49.916664
+84 50.0
+85 50.083332
+86 50.166664
+87 50.25
+88 50.333332
+89 50.416664
+90 50.5
+91 50.583332
+92 50.666664
+93 50.75
+94 50.833332
+95 50.916664
+96 51.0
diff -r 000000000000 -r fea8a53f8099 test-data/longitude.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/longitude.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,103 @@
+0 -6.0000005
+1 -5.916667
+2 -5.833334
+3 -5.7500005
+4 -5.666667
+5 -5.583334
+6 -5.5000005
+7 -5.416667
+8 -5.333334
+9 -5.2500005
+10 -5.166667
+11 -5.083334
+12 -5.0000005
+13 -4.9166675
+14 -4.833334
+15 -4.7500005
+16 -4.6666675
+17 -4.583334
+18 -4.5000005
+19 -4.4166675
+20 -4.333334
+21 -4.2500005
+22 -4.1666675
+23 -4.083334
+24 -4.0000005
+25 -3.9166672
+26 -3.833334
+27 -3.7500007
+28 -3.6666672
+29 -3.583334
+30 -3.5000007
+31 -3.4166672
+32 -3.333334
+33 -3.2500007
+34 -3.1666672
+35 -3.083334
+36 -3.0000007
+37 -2.9166672
+38 -2.833334
+39 -2.7500007
+40 -2.6666672
+41 -2.583334
+42 -2.5000007
+43 -2.4166672
+44 -2.333334
+45 -2.2500007
+46 -2.1666672
+47 -2.083334
+48 -2.0000007
+49 -1.9166673
+50 -1.833334
+51 -1.7500007
+52 -1.6666673
+53 -1.5833341
+54 -1.5000007
+55 -1.4166673
+56 -1.3333341
+57 -1.2500007
+58 -1.1666673
+59 -1.0833341
+60 -1.0000007
+61 -0.9166674
+62 -0.8333341
+63 -0.7500007
+64 -0.6666674
+65 -0.5833341
+66 -0.5000007
+67 -0.4166674
+68 -0.3333341
+69 -0.25000075
+70 -0.16666742
+71 -0.08333409
+72 -7.6e-07
+73 0.08333257
+74 0.1666659
+75 0.24999923
+76 0.33333257
+77 0.41666588
+78 0.49999923
+79 0.58333254
+80 0.66666585
+81 0.7499992
+82 0.83333254
+83 0.91666585
+84 0.9999992
+85 1.0833325
+86 1.1666659
+87 1.2499992
+88 1.3333325
+89 1.4166658
+90 1.4999992
+91 1.5833325
+92 1.6666658
+93 1.7499992
+94 1.8333325
+95 1.9166658
+96 1.9999992
+97 2.0833325
+98 2.1666658
+99 2.249999
+100 2.3333325
+101 2.4166658
+102 2.499999
diff -r 000000000000 -r fea8a53f8099 test-data/small.netcdf
Binary file test-data/small.netcdf has changed
diff -r 000000000000 -r fea8a53f8099 test-data/time.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/time.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,145 @@
+0 2002-12-15
+1 2003-01-15
+2 2003-02-15
+3 2003-03-15
+4 2003-04-15
+5 2003-05-15
+6 2003-06-15
+7 2003-07-15
+8 2003-08-15
+9 2003-09-15
+10 2003-10-15
+11 2003-11-15
+12 2003-12-15
+13 2004-01-15
+14 2004-02-15
+15 2004-03-15
+16 2004-04-15
+17 2004-05-15
+18 2004-06-15
+19 2004-07-15
+20 2004-08-15
+21 2004-09-15
+22 2004-10-15
+23 2004-11-15
+24 2004-12-15
+25 2005-01-15
+26 2005-02-15
+27 2005-03-15
+28 2005-04-15
+29 2005-05-15
+30 2005-06-15
+31 2005-07-15
+32 2005-08-15
+33 2005-09-15
+34 2005-10-15
+35 2005-11-15
+36 2005-12-15
+37 2006-01-15
+38 2006-02-15
+39 2006-03-15
+40 2006-04-15
+41 2006-05-15
+42 2006-06-15
+43 2006-07-15
+44 2006-08-15
+45 2006-09-15
+46 2006-10-15
+47 2006-11-15
+48 2006-12-15
+49 2007-01-15
+50 2007-02-15
+51 2007-03-15
+52 2007-04-15
+53 2007-05-15
+54 2007-06-15
+55 2007-07-15
+56 2007-08-15
+57 2007-09-15
+58 2007-10-15
+59 2007-11-15
+60 2007-12-15
+61 2008-01-15
+62 2008-02-15
+63 2008-03-15
+64 2008-04-15
+65 2008-05-15
+66 2008-06-15
+67 2008-07-15
+68 2008-08-15
+69 2008-09-15
+70 2008-10-15
+71 2008-11-15
+72 2008-12-15
+73 2009-01-15
+74 2009-02-15
+75 2009-03-15
+76 2009-04-15
+77 2009-05-15
+78 2009-06-15
+79 2009-07-15
+80 2009-08-15
+81 2009-09-15
+82 2009-10-15
+83 2009-11-15
+84 2009-12-15
+85 2010-01-15
+86 2010-02-15
+87 2010-03-15
+88 2010-04-15
+89 2010-05-15
+90 2010-06-15
+91 2010-07-15
+92 2010-08-15
+93 2010-09-15
+94 2010-10-15
+95 2010-11-15
+96 2010-12-15
+97 2011-01-15
+98 2011-02-15
+99 2011-03-15
+100 2011-04-15
+101 2011-05-15
+102 2011-06-15
+103 2011-07-15
+104 2011-08-15
+105 2011-09-15
+106 2011-10-15
+107 2011-11-15
+108 2011-12-15
+109 2012-01-15
+110 2012-02-15
+111 2012-03-15
+112 2012-04-15
+113 2012-05-15
+114 2012-06-15
+115 2012-07-15
+116 2012-08-15
+117 2012-09-15
+118 2012-10-15
+119 2012-11-15
+120 2012-12-15
+121 2013-01-15
+122 2013-02-15
+123 2013-03-15
+124 2013-04-15
+125 2013-05-15
+126 2013-06-15
+127 2013-07-15
+128 2013-08-15
+129 2013-09-15
+130 2013-10-15
+131 2013-11-15
+132 2013-12-15
+133 2014-01-15
+134 2014-02-15
+135 2014-03-15
+136 2014-04-15
+137 2014-05-15
+138 2014-06-15
+139 2014-07-15
+140 2014-08-15
+141 2014-09-15
+142 2014-10-15
+143 2014-11-15
+144 2014-12-15
diff -r 000000000000 -r fea8a53f8099 test-data/var_tab_dataset-ibi
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/var_tab_dataset-ibi Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,7 @@
+time 1 time 145
+chl 4 time 145 depth 1 latitude 97 longitude 103
+nh4 4 time 145 depth 1 latitude 97 longitude 103
+longitude 1 longitude 103
+latitude 1 latitude 97
+depth 1 depth 1
+phy 4 time 145 depth 1 latitude 97 longitude 103
diff -r 000000000000 -r fea8a53f8099 test-data/version.tabular
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/version.tabular Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,1 @@
+Galaxy xarray version 0.18.2
diff -r 000000000000 -r fea8a53f8099 xarray_coords_info.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/xarray_coords_info.xml Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,53 @@
+
+ Get values for each coordinate of a Netcdf file
+
+ macros.xml
+
+
+
+ python
+ netcdf4
+ xarray
+ geopandas
+ shapely
+
+ output_dir/version.tabular &&
+ python3 '$__tool_directory__/xarray_tool.py' '$input' --coords_info output_dir
+ ]]>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff -r 000000000000 -r fea8a53f8099 xarray_mapplot.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/xarray_mapplot.py Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,457 @@
+#!/usr/bin/env python3
+#
+#
+# usage: xarray_mapplot.py [-h] [--proj PROJ]
+# [--cmap CMAP]
+# [--output OUTPUT]
+# [--time TIMES]
+# [--nrow NROW]
+# [--ncol NCOL]
+# [--title title]
+# [--latitude LATITUDE]
+# [--longitude LONGITUDE]
+# [--land ALPHA-LAND]
+# [--ocean ALPHA-OCEAN]
+# [--coastline ALPHA-COASTLINE]
+# [--borders ALPHA-BORDERS]
+# [--xlim "x1,x2"]
+# [--ylim "y1,y2"]
+# [--range "valmin,valmax"]
+# [--threshold VAL]
+# [--label label-colorbar]
+# [--shift]
+# [-v]
+# input varname
+#
+# positional arguments:
+# input input filename with geographical coordinates (netCDF
+# format)
+# varname Specify which variable to plot (case sensitive)
+#
+# optional arguments:
+# -h, --help show this help message and exit
+# --proj PROJ Specify the projection on which we draw
+# --cmap CMAP Specify which colormap to use for plotting
+# --output OUTPUT output filename to store resulting image (png format)
+# --time TIMES time index from the file for multiple plots ("0 1 2 3")
+# --title plot or subplot title
+# --latitude variable name for latitude
+# --longitude variable name for longitude
+# --land add land on plot with alpha value [0-1]
+# --ocean add oceans on plot with alpha value [0-1]
+# --coastline add coastline with alpha value [0-1]
+# --borders add country borders with alpha value [0-1]
+# --xlim limited geographical area longitudes "x1,x2"
+# --ylim limited geographical area latitudes "y1,y2"
+# --range "valmin,valmax" for plotting
+# --threshold do not plot values below threshold
+# --label set a label for colormap
+# --shift shift longitudes if specified
+# -v, --verbose switch on verbose mode
+#
+
+import argparse
+import ast
+import warnings
+from pathlib import Path
+
+import cartopy.crs as ccrs
+import cartopy.feature as feature
+
+from cmcrameri import cm
+
+import matplotlib as mpl
+mpl.use('Agg')
+from matplotlib import pyplot # noqa: I202,E402
+
+import xarray as xr # noqa: E402
+
+
+class MapPlotXr ():
+ def __init__(self, input, proj, varname, cmap, output, verbose=False,
+ time=[], title="", latitude="latitude",
+ longitude="longitude", land=0, ocean=0,
+ coastline=0, borders=0, xlim=[], ylim=[],
+ threshold="", label="", shift=False,
+ range_values=[]):
+ self.input = input
+ print("PROJ", proj)
+ if proj != "" and proj is not None:
+ self.proj = proj.replace('X', ':')
+ else:
+ self.proj = proj
+ self.varname = varname
+ self.get_cmap(cmap)
+ self.time = time
+ self.latitude = latitude
+ self.longitude = longitude
+ self.land = land
+ self.ocean = ocean
+ self.coastline = coastline
+ self.borders = borders
+ self.xlim = xlim
+ self.ylim = ylim
+ self.range = range_values
+ self.threshold = threshold
+ self.shift = shift
+ self.xylim_supported = False
+ self.colorbar = True
+ self.title = title
+ if output is None:
+ self.output = Path(input).stem + '.png'
+ else:
+ self.output = output
+ self.verbose = verbose
+ self.dset = xr.open_dataset(self.input, use_cftime=True)
+
+ self.label = {}
+ if label != "" and label is not None:
+ self.label['label'] = label
+ if verbose:
+ print("input: ", self.input)
+ print("proj: ", self.proj)
+ print("varname: ", self.varname)
+ print("time: ", self.time)
+ print("minval, maxval: ", self.range)
+ print("title: ", self.title)
+ print("output: ", self.output)
+ print("label: ", self.label)
+ print("shift: ", self.shift)
+ print("ocean: ", self.ocean)
+ print("land: ", self.land)
+ print("coastline: ", self.coastline)
+ print("borders: ", self.borders)
+ print("latitude: ", self.latitude)
+ print("longitude: ", self.longitude)
+ print("xlim: ", self.xlim)
+ print("ylim: ", self.ylim)
+
+ def get_cmap(self, cmap):
+ if cmap[0:3] == 'cm.':
+ self.cmap = cm.__dict__[cmap[3:]]
+ else:
+ self.cmap = cmap
+
+ def projection(self):
+ if self.proj is None:
+ return ccrs.PlateCarree()
+
+ proj_dict = ast.literal_eval(self.proj)
+
+ user_proj = proj_dict.pop("proj")
+ if user_proj == 'PlateCarree':
+ self.xylim_supported = True
+ return ccrs.PlateCarree(**proj_dict)
+ elif user_proj == 'AlbersEqualArea':
+ return ccrs.AlbersEqualArea(**proj_dict)
+ elif user_proj == 'AzimuthalEquidistant':
+ return ccrs.AzimuthalEquidistant(**proj_dict)
+ elif user_proj == 'EquidistantConic':
+ return ccrs.EquidistantConic(**proj_dict)
+ elif user_proj == 'LambertConformal':
+ return ccrs.LambertConformal(**proj_dict)
+ elif user_proj == 'LambertCylindrical':
+ return ccrs.LambertCylindrical(**proj_dict)
+ elif user_proj == 'Mercator':
+ return ccrs.Mercator(**proj_dict)
+ elif user_proj == 'Miller':
+ return ccrs.Miller(**proj_dict)
+ elif user_proj == 'Mollweide':
+ return ccrs.Mollweide(**proj_dict)
+ elif user_proj == 'Orthographic':
+ return ccrs.Orthographic(**proj_dict)
+ elif user_proj == 'Robinson':
+ return ccrs.Robinson(**proj_dict)
+ elif user_proj == 'Sinusoidal':
+ return ccrs.Sinusoidal(**proj_dict)
+ elif user_proj == 'Stereographic':
+ return ccrs.Stereographic(**proj_dict)
+ elif user_proj == 'TransverseMercator':
+ return ccrs.TransverseMercator(**proj_dict)
+ elif user_proj == 'UTM':
+ return ccrs.UTM(**proj_dict)
+ elif user_proj == 'InterruptedGoodeHomolosine':
+ return ccrs.InterruptedGoodeHomolosine(**proj_dict)
+ elif user_proj == 'RotatedPole':
+ return ccrs.RotatedPole(**proj_dict)
+ elif user_proj == 'OSGB':
+ self.xylim_supported = False
+ return ccrs.OSGB(**proj_dict)
+ elif user_proj == 'EuroPP':
+ self.xylim_supported = False
+ return ccrs.EuroPP(**proj_dict)
+ elif user_proj == 'Geostationary':
+ return ccrs.Geostationary(**proj_dict)
+ elif user_proj == 'NearsidePerspective':
+ return ccrs.NearsidePerspective(**proj_dict)
+ elif user_proj == 'EckertI':
+ return ccrs.EckertI(**proj_dict)
+ elif user_proj == 'EckertII':
+ return ccrs.EckertII(**proj_dict)
+ elif user_proj == 'EckertIII':
+ return ccrs.EckertIII(**proj_dict)
+ elif user_proj == 'EckertIV':
+ return ccrs.EckertIV(**proj_dict)
+ elif user_proj == 'EckertV':
+ return ccrs.EckertV(**proj_dict)
+ elif user_proj == 'EckertVI':
+ return ccrs.EckertVI(**proj_dict)
+ elif user_proj == 'EqualEarth':
+ return ccrs.EqualEarth(**proj_dict)
+ elif user_proj == 'Gnomonic':
+ return ccrs.Gnomonic(**proj_dict)
+ elif user_proj == 'LambertAzimuthalEqualArea':
+ return ccrs.LambertAzimuthalEqualArea(**proj_dict)
+ elif user_proj == 'NorthPolarStereo':
+ return ccrs.NorthPolarStereo(**proj_dict)
+ elif user_proj == 'OSNI':
+ return ccrs.OSNI(**proj_dict)
+ elif user_proj == 'SouthPolarStereo':
+ return ccrs.SouthPolarStereo(**proj_dict)
+
+    def plot(self, ts=None):
+        """Draw the selected variable on a map and save it as an image.
+
+        ts : int or None
+            Index into the time dimension; None plots the dataset with
+            no time selection, an integer plots that single time step
+            (the output filename then gets a "_time<ts>" suffix).
+        """
+        # Optionally re-centre longitudes from [0, 360) to [-180, 180).
+        # Only the two conventional coordinate names are handled.
+        if self.shift:
+            if self.longitude == 'longitude':
+                self.dset = self.dset.assign_coords(
+                    longitude=(((
+                        self.dset[self.longitude]
+                        + 180) % 360) - 180))
+            elif self.longitude == 'lon':
+                self.dset = self.dset.assign_coords(
+                    lon=(((self.dset[self.longitude]
+                           + 180) % 360) - 180))
+
+        pyplot.figure(1, figsize=[20, 10])
+
+        # Set the projection to use for plotting
+        ax = pyplot.subplot(1, 1, 1, projection=self.projection())
+        # Optional cartopy decorations; an alpha of 0 (falsy) disables
+        # the corresponding feature entirely.
+        if self.land:
+            ax.add_feature(feature.LAND, alpha=self.land)
+
+        if self.ocean:
+            ax.add_feature(feature.OCEAN, alpha=self.ocean)
+        if self.coastline:
+            ax.coastlines(resolution='10m', alpha=self.coastline)
+        if self.borders:
+            ax.add_feature(feature.BORDERS, linestyle=':', alpha=self.borders)
+
+        # Geographic extent: user-supplied limits when given (order
+        # normalised with min/max), else the data's own extent.
+        if self.xlim:
+            min_lon = min(self.xlim[0], self.xlim[1])
+            max_lon = max(self.xlim[0], self.xlim[1])
+        else:
+            min_lon = self.dset[self.longitude].min()
+            max_lon = self.dset[self.longitude].max()
+
+        if self.ylim:
+            min_lat = min(self.ylim[0], self.ylim[1])
+            max_lat = max(self.ylim[0], self.ylim[1])
+        else:
+            min_lat = self.dset[self.latitude].min()
+            max_lat = self.dset[self.latitude].max()
+
+        # xylim_supported is set elsewhere in the class; some
+        # projections reject explicit xlim/ylim.
+        if self.xylim_supported:
+            pyplot.xlim(min_lon, max_lon)
+            pyplot.ylim(min_lat, max_lat)
+
+        # Fix extent
+        # Values at or below this threshold are masked out; defaulting
+        # to the variable minimum masks nothing (except the min itself).
+        if self.threshold == "" or self.threshold is None:
+            threshold = self.dset[self.varname].min()
+        else:
+            threshold = float(self.threshold)
+
+        # Colour scale range: user-supplied [min, max] or the data range.
+        if self.range == []:
+            minval = self.dset[self.varname].min()
+            maxval = self.dset[self.varname].max()
+        else:
+            minval = self.range[0]
+            maxval = self.range[1]
+
+        if self.verbose:
+            print("minval: ", minval)
+            print("maxval: ", maxval)
+
+        # pass extent with vmin and vmax parameters
+        # Data coordinates are lat/lon, so always transform from
+        # PlateCarree regardless of the display projection.
+        proj_t = ccrs.PlateCarree()
+        if ts is None:
+            # Single plot over the whole (time-less) selection.
+            self.dset.where(
+                self.dset[self.varname] > threshold
+            )[self.varname].plot(ax=ax,
+                                 vmin=minval,
+                                 vmax=maxval,
+                                 transform=proj_t,
+                                 cmap=self.cmap,
+                                 cbar_kwargs=self.label
+                                 )
+            if self.title != "" and self.title is not None:
+                pyplot.title(self.title)
+            pyplot.savefig(self.output)
+        else:
+            # One image per requested time step.
+            if self.colorbar:
+                self.dset.where(
+                    self.dset[self.varname] > threshold
+                )[self.varname].isel(time=ts).plot(ax=ax,
+                                                   vmin=minval,
+                                                   vmax=maxval,
+                                                   transform=proj_t,
+                                                   cmap=self.cmap,
+                                                   cbar_kwargs=self.label
+                                                   )
+            else:
+                # NOTE(review): this branch masks with `minval` while
+                # every other branch masks with `threshold` — confirm
+                # the asymmetry is intentional.
+                self.dset.where(
+                    self.dset[self.varname] > minval
+                )[self.varname].isel(time=ts).plot(ax=ax,
+                                                   vmin=minval,
+                                                   vmax=maxval,
+                                                   transform=proj_t,
+                                                   cmap=self.cmap,
+                                                   add_colorbar=False)
+            if self.title != "" and self.title is not None:
+                pyplot.title(self.title + "(time = " + str(ts) + ')')
+            pyplot.savefig(self.output[:-4] + "_time" + str(ts) +
+                           self.output[-4:])  # assume png format
+
+
+if __name__ == '__main__':
+    # Command-line entry point: parse options, normalise them into the
+    # types MapPlotXr expects, then plot once or once per time index.
+    warnings.filterwarnings("ignore")
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        'input',
+        help='input filename with geographical coordinates (netCDF format)'
+    )
+
+    parser.add_argument(
+        '--proj',
+        help='Specify the projection on which we draw'
+    )
+    parser.add_argument(
+        'varname',
+        help='Specify which variable to plot (case sensitive)'
+    )
+    parser.add_argument(
+        '--cmap',
+        help='Specify which colormap to use for plotting'
+    )
+    parser.add_argument(
+        '--output',
+        help='output filename to store resulting image (png format)'
+    )
+    parser.add_argument(
+        '--time',
+        help='list of times to plot for multiple plots'
+    )
+    parser.add_argument(
+        '--title',
+        help='plot title'
+    )
+    parser.add_argument(
+        '--latitude',
+        help='variable name for latitude'
+    )
+    parser.add_argument(
+        '--longitude',
+        help='variable name for longitude'
+    )
+    parser.add_argument(
+        '--land',
+        help='add land on plot with alpha value [0-1]'
+    )
+    parser.add_argument(
+        '--ocean',
+        help='add oceans on plot with alpha value [0-1]'
+    )
+    parser.add_argument(
+        '--coastline',
+        help='add coastline with alpha value [0-1]'
+    )
+    parser.add_argument(
+        '--borders',
+        help='add country borders with alpha value [0-1]'
+    )
+    parser.add_argument(
+        '--xlim',
+        help='limited geographical area longitudes "x1,x2"'
+    )
+    parser.add_argument(
+        '--ylim',
+        help='limited geographical area latitudes "y1,y2"'
+    )
+    parser.add_argument(
+        '--range',
+        help='min and max values for plotting "minval,maxval"'
+    )
+    parser.add_argument(
+        '--threshold',
+        help='do not plot values below threshold'
+    )
+    parser.add_argument(
+        '--label',
+        help='set a label for colorbar'
+    )
+    parser.add_argument(
+        '--shift',
+        help='shift longitudes if specified',
+        action="store_true"
+    )
+    parser.add_argument(
+        "-v", "--verbose",
+        help="switch on verbose mode",
+        action="store_true")
+    args = parser.parse_args()
+
+    # Normalise optional arguments: comma-separated strings become
+    # typed lists; unset coordinate names fall back to the conventional
+    # "latitude"/"longitude"; unset feature alphas default to 0
+    # (feature disabled).
+    if args.time is None:
+        time = []
+    else:
+        time = list(map(int, args.time.split(",")))
+    if args.xlim is None:
+        xlim = []
+    else:
+        xlim = list(map(float, args.xlim.split(",")))
+    if args.ylim is None:
+        ylim = []
+    else:
+        ylim = list(map(float, args.ylim.split(",")))
+    if args.range is None:
+        range_values = []
+    else:
+        range_values = list(map(float, args.range.split(",")))
+    if args.latitude is None:
+        latitude = "latitude"
+    else:
+        latitude = args.latitude
+    if args.longitude is None:
+        longitude = "longitude"
+    else:
+        longitude = args.longitude
+    if args.land is None:
+        land = 0
+    else:
+        land = float(args.land)
+    if args.ocean is None:
+        ocean = 0
+    else:
+        ocean = float(args.ocean)
+    if args.coastline is None:
+        coastline = 0
+    else:
+        coastline = float(args.coastline)
+    if args.borders is None:
+        borders = 0
+    else:
+        borders = float(args.borders)
+
+    dset = MapPlotXr(input=args.input, proj=args.proj, varname=args.varname,
+                     cmap=args.cmap, output=args.output, verbose=args.verbose,
+                     time=time, title=args.title,
+                     latitude=latitude, longitude=longitude, land=land,
+                     ocean=ocean, coastline=coastline, borders=borders,
+                     xlim=xlim, ylim=ylim, threshold=args.threshold,
+                     label=args.label, shift=args.shift,
+                     range_values=range_values)
+
+    # No time list: single plot; otherwise one output image per index.
+    if dset.time == []:
+        dset.plot()
+    else:
+        for t in dset.time:
+            dset.plot(t)
+            dset.shift = False  # only shift once
+            # NOTE(review): the colorbar flag is forced on after each
+            # plot; the constructor's initial value is not visible here.
+            dset.colorbar = True
diff -r 000000000000 -r fea8a53f8099 xarray_netcdf2netcdf.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/xarray_netcdf2netcdf.py Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+#
+# Apply operations on selected variables
+# - scale
+# one can also select the range of time (for timeseries)
+# to apply these operations over the range only
+# when a range of time is selected and when scaling, one
+# can choose to save the entire timeseries or
+# the selected range only.
+# when scaling, one can add additional filters on dimensions
+# (typically used to filter over latitudes and longitudes)
+
+
+import argparse
+import warnings
+
+import xarray as xr # noqa: E402
+
+
+class netCDF2netCDF ():
+    """Select a variable from a netCDF file, optionally filter its
+    dimensions and scale its values, then save the result as a new
+    netCDF file."""
+
+    def __init__(self, infile, varname, scale="",
+                 output="output.netcdf",
+                 write_all=False,
+                 filter_list="",
+                 verbose=False):
+        # infile: input netCDF filename.
+        # varname: variable the filters and scale factor apply to.
+        # scale: multiplicative factor; "" or None means 1 (identity).
+        # output: output filename; None falls back to "output.netcdf".
+        # write_all: save the whole dataset (with the scaled variable
+        #   substituted in) instead of just the selected variable.
+        # filter_list: list of "dim#op#value_s[#value_e]" specs.
+        self.infile = infile
+        self.verbose = verbose
+        self.varname = varname
+        self.write_all = write_all
+        self.filter = filter_list
+        self.selection = {}  # dimension name -> index or slice (isel)
+        if scale == "" or scale is None:
+            self.scale = 1
+        else:
+            self.scale = float(scale)
+        if output is None:
+            self.output = "output.netcdf"
+        else:
+            self.output = output
+        # initialization
+        self.dset = None    # result DataArray, set by compute()
+        self.subset = None  # NOTE(review): never used afterwards
+        if self.verbose:
+            print("infile: ", self.infile)
+            print("varname: ", self.varname)
+            print("filter_list: ", self.filter)
+            print("scale: ", self.scale)
+            print("write_all: ", self.write_all)
+            print("output: ", self.output)
+
+    def dimension_selection(self, single_filter):
+        """Parse one "dim#op#value_s[#value_e]" spec into self.selection.
+
+        Ops: 'sl' slice [value_s, value_e), 'to' everything up to
+        value_s, 'from' value_s onwards, 'is' the single index value_s.
+        Values are positional indices (used with isel), not labels.
+        """
+        split_filter = single_filter.split('#')
+        dimension_varname = split_filter[0]
+        op = split_filter[1]
+        ll = int(split_filter[2])
+        if (op == 'sl'):
+            rl = int(split_filter[3])
+            self.selection[dimension_varname] = slice(ll, rl)
+        elif (op == 'to'):
+            self.selection[dimension_varname] = slice(None, ll)
+        elif (op == 'from'):
+            self.selection[dimension_varname] = slice(ll, None)
+        elif (op == 'is'):
+            self.selection[dimension_varname] = ll
+
+    def filter_selection(self):
+        """Apply all dimension filters, then the scale factor."""
+        for single_filter in self.filter:
+            self.dimension_selection(single_filter)
+        if self.write_all:
+            # replace the variable inside the full dataset in place
+            self.ds[self.varname] = \
+                self.ds[self.varname].isel(self.selection)*self.scale
+        else:
+            self.dset = \
+                self.ds[self.varname].isel(self.selection)*self.scale
+
+    def compute(self):
+        """Open the input file and build the (scaled) selection."""
+        if self.dset is None:
+            self.ds = xr.open_dataset(self.infile)
+        if self.filter:
+            self.filter_selection()
+            if self.verbose:
+                print(self.selection)
+        # NOTE(review): write_all comes from argparse store_true, so it
+        # is a bool and never None — this branch is effectively `else`.
+        elif self.write_all is not None:
+            self.dset = self.ds[self.varname]
+
+    def save(self):
+        """Write the full dataset (write_all) or just the selection."""
+        if self.write_all:
+            self.ds.to_netcdf(self.output)
+        else:
+            self.dset.to_netcdf(self.output)
+
+
+if __name__ == '__main__':
+    # Command-line entry point: parse options, run the selection/scale
+    # pipeline, and write the resulting netCDF file.
+    warnings.filterwarnings("ignore")
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        'input',
+        help='input filename in netCDF format'
+    )
+    parser.add_argument(
+        'varname',
+        help='Specify which variable to plot (case sensitive)'
+    )
+    parser.add_argument(
+        '--filter',
+        nargs="*",
+        help='Filter list variable#operator#value_s#value_e'
+    )
+    parser.add_argument(
+        '--output',
+        help='Output filename to store the resulting netCDF file'
+    )
+    parser.add_argument(
+        '--scale',
+        help='scale factor to apply to selection (float)'
+    )
+    parser.add_argument(
+        "--write_all",
+        help="write all data to netCDF",
+        action="store_true")
+    parser.add_argument(
+        "-v", "--verbose",
+        help="switch on verbose mode",
+        action="store_true")
+    args = parser.parse_args()
+
+    # Build the converter, compute the selection, then save it.
+    dset = netCDF2netCDF(infile=args.input, varname=args.varname,
+                         scale=args.scale, output=args.output,
+                         filter_list=args.filter,
+                         write_all=args.write_all,
+                         verbose=args.verbose)
+    dset.compute()
+    dset.save()
diff -r 000000000000 -r fea8a53f8099 xarray_tool.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/xarray_tool.py Sun Jun 06 08:50:43 2021 +0000
@@ -0,0 +1,365 @@
+# xarray tool for:
+# - getting metadata information
+# - select data and save results in csv file for further post-processing
+
+import argparse
+import csv
+import os
+import warnings
+
+import geopandas as gdp
+
+import pandas as pd
+
+from shapely.geometry import Point
+from shapely.ops import nearest_points
+
+import xarray as xr
+
+
+class XarrayTool ():
+    """Inspect a netCDF file (metadata info / variable summary) or
+    extract a variable selection (by time, value filters and
+    geographic area) into tab-separated output files."""
+
+    def __init__(self, infile, outfile_info="", outfile_summary="",
+                 select="", outfile="", outputdir="", latname="",
+                 latvalN="", latvalS="", lonname="", lonvalE="",
+                 lonvalW="", filter_list="", coords="", time="",
+                 verbose=False, no_missing=False, coords_info=None,
+                 tolerance=None):
+        # infile: input netCDF filename.
+        # outfile_info / outfile_summary: targets for info()/summary().
+        # select: variable name to extract; outfile/outputdir: where
+        #   the tabular result(s) are written.
+        # latname/lonname: coordinate names; latvalN/latvalS and
+        #   lonvalE/lonvalW: bounding values (a single point when only
+        #   N and E are given); empty string means "not set".
+        # filter_list: list of "var#op#value_s[#value_e]" filters.
+        # coords: tabular file of locations for batch extraction.
+        # tolerance: max distance for nearest-neighbour matching;
+        #   -1 disables passing a tolerance to xarray.
+        self.infile = infile
+        self.outfile_info = outfile_info
+        self.outfile_summary = outfile_summary
+        self.select = select
+        self.outfile = outfile
+        self.outputdir = outputdir
+        self.latname = latname
+        if tolerance != "" and tolerance is not None:
+            self.tolerance = float(tolerance)
+        else:
+            self.tolerance = -1
+        if latvalN != "" and latvalN is not None:
+            self.latvalN = float(latvalN)
+        else:
+            self.latvalN = ""
+        if latvalS != "" and latvalS is not None:
+            self.latvalS = float(latvalS)
+        else:
+            self.latvalS = ""
+        self.lonname = lonname
+        if lonvalE != "" and lonvalE is not None:
+            self.lonvalE = float(lonvalE)
+        else:
+            self.lonvalE = ""
+        if lonvalW != "" and lonvalW is not None:
+            self.lonvalW = float(lonvalW)
+        else:
+            self.lonvalW = ""
+        self.filter = filter_list
+        self.time = time
+        self.coords = coords
+        self.verbose = verbose
+        self.no_missing = no_missing
+        # initialization
+        self.dset = None  # selected DataArray, set lazily by selection()
+        self.gset = None  # geographically sub-selected result
+        self.coords_info = coords_info
+        if self.verbose:
+            print("infile: ", self.infile)
+            print("outfile_info: ", self.outfile_info)
+            print("outfile_summary: ", self.outfile_summary)
+            print("outfile: ", self.outfile)
+            print("select: ", self.select)
+            # NOTE(review): outfile is printed twice above/below.
+            print("outfile: ", self.outfile)
+            print("outputdir: ", self.outputdir)
+            print("latname: ", self.latname)
+            print("latvalN: ", self.latvalN)
+            print("latvalS: ", self.latvalS)
+            print("lonname: ", self.lonname)
+            print("lonvalE: ", self.lonvalE)
+            print("lonvalW: ", self.lonvalW)
+            print("filter: ", self.filter)
+            print("time: ", self.time)
+            print("coords: ", self.coords)
+            print("coords_info: ", self.coords_info)
+
+    def info(self):
+        """Write xarray's Dataset.info() report to outfile_info."""
+        f = open(self.outfile_info, 'w')
+        ds = xr.open_dataset(self.infile)
+        ds.info(f)
+        f.close()
+
+    def summary(self):
+        """Write a tab-separated table listing every data variable and
+        coordinate with its dimension names and sizes."""
+        f = open(self.outfile_summary, 'w')
+        ds = xr.open_dataset(self.infile)
+        writer = csv.writer(f, delimiter='\t')
+        header = ['VariableName', 'NumberOfDimensions']
+        for idx, val in enumerate(ds.dims.items()):
+            header.append('Dim' + str(idx) + 'Name')
+            header.append('Dim' + str(idx) + 'Size')
+        writer.writerow(header)
+        for name, da in ds.data_vars.items():
+            line = [name]
+            line.append(len(ds[name].shape))
+            # da.sizes iterates dimension names, da.shape their sizes:
+            # append name (s) then size (d) to match the header order.
+            for d, s in zip(da.shape, da.sizes):
+                line.append(s)
+                line.append(d)
+            writer.writerow(line)
+        for name, da in ds.coords.items():
+            line = [name]
+            line.append(len(ds[name].shape))
+            for d, s in zip(da.shape, da.sizes):
+                line.append(s)
+                line.append(d)
+            writer.writerow(line)
+        f.close()
+
+    def rowfilter(self, single_filter):
+        """Apply one "var#op#value_s[#value_e]" filter to self.dset.
+
+        Ops: 'bi' between both values, 'le'/'ge' below/above value_s,
+        'e' equal. Filtering on the selected variable masks values
+        (where); filtering on any other name slices that dimension.
+        """
+        split_filter = single_filter.split('#')
+        filter_varname = split_filter[0]
+        op = split_filter[1]
+        ll = float(split_filter[2])
+        if (op == 'bi'):
+            rl = float(split_filter[3])
+        if filter_varname == self.select:
+            # filter on values of the selected variable
+            if op == 'bi':
+                self.dset = self.dset.where(
+                    (self.dset <= rl) & (self.dset >= ll)
+                )
+            elif op == 'le':
+                self.dset = self.dset.where(self.dset <= ll)
+            elif op == 'ge':
+                self.dset = self.dset.where(self.dset >= ll)
+            elif op == 'e':
+                self.dset = self.dset.where(self.dset == ll)
+        else:  # filter on other dimensions of the selected variable
+            if op == 'bi':
+                self.dset = self.dset.sel({filter_varname: slice(ll, rl)})
+            elif op == 'le':
+                self.dset = self.dset.sel({filter_varname: slice(None, ll)})
+            elif op == 'ge':
+                self.dset = self.dset.sel({filter_varname: slice(ll, None)})
+            elif op == 'e':
+                self.dset = self.dset.sel({filter_varname: ll},
+                                          method='nearest')
+
+    def selection(self):
+        """Run the full selection pipeline (variable, time, filters,
+        area) and write the result to self.outfile as a TSV table."""
+        if self.dset is None:
+            # first call: open the file and apply the one-off filters
+            self.ds = xr.open_dataset(self.infile)
+            self.dset = self.ds[self.select]  # select variable
+            if self.time:
+                self.datetime_selection()
+            if self.filter:
+                self.filter_selection()
+
+        self.area_selection()
+        if self.gset.count() > 1:
+            # convert to dataframe if several rows and cols
+            self.gset = self.gset.to_dataframe().dropna(how='all'). \
+                reset_index()
+            self.gset.to_csv(self.outfile, header=True, sep='\t')
+        else:
+            # single value: build a one-row lat/lon/value table
+            data = {
+                self.latname: [self.gset[self.latname].values],
+                self.lonname: [self.gset[self.lonname].values],
+                self.select: [self.gset.values]
+            }
+
+            df = pd.DataFrame(data, columns=[self.latname, self.lonname,
+                                             self.select])
+            df.to_csv(self.outfile, header=True, sep='\t')
+
+    def datetime_selection(self):
+        """Restrict self.dset along a time dimension using the
+        "var#op#value_s[#value_e]" spec held in self.time.
+
+        Ops: 'sl' slice, 'to' up to, 'from' onwards, 'is' nearest.
+        """
+        split_filter = self.time.split('#')
+        time_varname = split_filter[0]
+        op = split_filter[1]
+        ll = split_filter[2]
+        if (op == 'sl'):
+            rl = split_filter[3]
+            self.dset = self.dset.sel({time_varname: slice(ll, rl)})
+        elif (op == 'to'):
+            self.dset = self.dset.sel({time_varname: slice(None, ll)})
+        elif (op == 'from'):
+            self.dset = self.dset.sel({time_varname: slice(ll, None)})
+        elif (op == 'is'):
+            self.dset = self.dset.sel({time_varname: ll}, method='nearest')
+
+    def filter_selection(self):
+        """Apply every filter from self.filter, in order."""
+        for single_filter in self.filter:
+            self.rowfilter(single_filter)
+
+    def area_selection(self):
+        """Populate self.gset: a bounding box when S/W are given, the
+        nearest single point when only N/E are given, otherwise the
+        whole filtered selection."""
+
+        if self.latvalS != "" and self.lonvalW != "":
+            # Select geographical area
+            self.gset = self.dset.sel({self.latname:
+                                       slice(self.latvalS, self.latvalN),
+                                       self.lonname:
+                                       slice(self.lonvalW, self.lonvalE)})
+        elif self.latvalN != "" and self.lonvalE != "":
+            # select nearest location
+            if self.no_missing:
+                self.nearest_latvalN = self.latvalN
+                self.nearest_lonvalE = self.lonvalE
+            else:
+                # find nearest location without NaN values
+                self.nearest_location()
+            if self.tolerance > 0:
+                self.gset = self.dset.sel({self.latname: self.nearest_latvalN,
+                                           self.lonname: self.nearest_lonvalE},
+                                          method='nearest',
+                                          tolerance=self.tolerance)
+            else:
+                self.gset = self.dset.sel({self.latname: self.nearest_latvalN,
+                                           self.lonname: self.nearest_lonvalE},
+                                          method='nearest')
+        else:
+            self.gset = self.dset
+
+    def nearest_location(self):
+        """Find the closest non-null grid point to (lonvalE, latvalN)
+        and store it in nearest_latvalN/nearest_lonvalE."""
+        # Build a geopandas dataframe with all first elements in each dimension
+        # so we assume null values correspond to a mask that is the same for
+        # all dimensions in the dataset.
+        dsel_frame = self.dset
+        for dim in self.dset.dims:
+            if dim != self.latname and dim != self.lonname:
+                dsel_frame = dsel_frame.isel({dim: 0})
+        # transform to pandas dataframe
+        dff = dsel_frame.to_dataframe().dropna().reset_index()
+        # transform to geopandas to collocate
+        gdf = gdp.GeoDataFrame(dff,
+                               geometry=gdp.points_from_xy(dff[self.lonname],
+                                                           dff[self.latname]))
+        # Find nearest location where values are not null
+        point = Point(self.lonvalE, self.latvalN)
+        multipoint = gdf.geometry.unary_union
+        queried_geom, nearest_geom = nearest_points(point, multipoint)
+        self.nearest_latvalN = nearest_geom.y
+        self.nearest_lonvalE = nearest_geom.x
+
+    def selection_from_coords(self):
+        """Run selection() once per row of the coords file, writing
+        one tabular file per location into outputdir."""
+        fcoords = pd.read_csv(self.coords, sep='\t')
+        for row in fcoords.itertuples():
+            # NOTE(review): itertuples() yields (Index, col0, col1, …),
+            # so row[0] is the row index and row[1] the first column —
+            # verify the coords file layout matches this lat/lon pick.
+            self.latvalN = row[0]
+            self.lonvalE = row[1]
+            self.outfile = (os.path.join(self.outputdir,
+                                         self.select + '_' +
+                                         str(row.Index) + '.tabular'))
+            self.selection()
+
+    def get_coords_info(self):
+        """Dump each coordinate's values into its own tabular file
+        under the coords_info directory."""
+        ds = xr.open_dataset(self.infile)
+        for c in ds.coords:
+            filename = os.path.join(self.coords_info,
+                                    c.strip() +
+                                    '.tabular')
+            # NOTE(review): local `pd` shadows the pandas module alias
+            # inside this function; it is rebound before use on every
+            # iteration, so this works, but the name is misleading.
+            pd = ds.coords[c].to_pandas()
+            pd.index = range(len(pd))
+            pd.to_csv(filename, header=False, sep='\t')
+
+
+if __name__ == '__main__':
+    # Command-line entry point: info/summary may both run; the
+    # extraction modes (coords batch, single selection, coords dump)
+    # are mutually exclusive and tried in that priority order.
+    warnings.filterwarnings("ignore")
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        'infile',
+        help='netCDF input filename'
+    )
+    parser.add_argument(
+        '--info',
+        help='Output filename where metadata information is stored'
+    )
+    parser.add_argument(
+        '--summary',
+        help='Output filename where data summary information is stored'
+    )
+    parser.add_argument(
+        '--select',
+        help='Variable name to select'
+    )
+    parser.add_argument(
+        '--latname',
+        help='Latitude name'
+    )
+    parser.add_argument(
+        '--latvalN',
+        help='North latitude value'
+    )
+    parser.add_argument(
+        '--latvalS',
+        help='South latitude value'
+    )
+    parser.add_argument(
+        '--lonname',
+        help='Longitude name'
+    )
+    parser.add_argument(
+        '--lonvalE',
+        help='East longitude value'
+    )
+    parser.add_argument(
+        '--lonvalW',
+        help='West longitude value'
+    )
+    parser.add_argument(
+        '--tolerance',
+        help='Maximum distance between original and selected value for '
+             ' inexact matches e.g. abs(index[indexer] - target) <= tolerance'
+    )
+    parser.add_argument(
+        '--coords',
+        help='Input file containing Latitude and Longitude'
+             'for geographical selection'
+    )
+    parser.add_argument(
+        '--coords_info',
+        help='output-folder where for each coordinate, coordinate values '
+             ' are being printed in the corresponding outputfile'
+    )
+    parser.add_argument(
+        '--filter',
+        nargs="*",
+        help='Filter list variable#operator#value_s#value_e'
+    )
+    parser.add_argument(
+        '--time',
+        help='select timeseries variable#operator#value_s[#value_e]'
+    )
+    parser.add_argument(
+        '--outfile',
+        help='csv outfile for storing results of the selection'
+             '(valid only when --select)'
+    )
+    parser.add_argument(
+        '--outputdir',
+        help='folder name for storing results with multiple selections'
+             '(valid only when --select)'
+    )
+    parser.add_argument(
+        "-v", "--verbose",
+        help="switch on verbose mode",
+        action="store_true"
+    )
+    parser.add_argument(
+        "--no_missing",
+        help="""Do not take into account possible null/missing values
+             (only valid for single location)""",
+        action="store_true"
+    )
+    args = parser.parse_args()
+
+    # All arguments are forwarded positionally, matching __init__'s
+    # parameter order.
+    p = XarrayTool(args.infile, args.info, args.summary, args.select,
+                   args.outfile, args.outputdir, args.latname,
+                   args.latvalN, args.latvalS, args.lonname,
+                   args.lonvalE, args.lonvalW, args.filter,
+                   args.coords, args.time, args.verbose,
+                   args.no_missing, args.coords_info, args.tolerance)
+    if args.info:
+        p.info()
+    if args.summary:
+        p.summary()
+    if args.coords:
+        p.selection_from_coords()
+    elif args.select:
+        p.selection()
+    elif args.coords_info:
+        p.get_coords_info()