changeset 0:81b0ca76435d draft default tip
"planemo upload for repository https://gitlab.com/eetun-tiimi/EODIE commit c4a5672398bc878dd2bc0bf4f3a26f59b3f6395c"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/eodie.xml Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,363 @@ +<tool id="eodie" name="EODIE" version="@VERSION@" profile="20.09"> + <description>extracts object-based time series from Earth Observation data</description> + <macros> + <import>macros.xml</import> + </macros> + <expand macro="edam_ontology"/> + <requirements> + <requirement type="package" version="@VERSION@">eodie</requirement> + <requirement type="package" version="3.9">python</requirement> + <requirement type="package" version="1.8">shapely</requirement> + <requirement type="package" version="1.2">rasterio</requirement> + <requirement type="package" version="0.16">rasterstats</requirement> + <requirement type="package" version="1.8">fiona</requirement> + <requirement type="package" version="1.21">numpy</requirement> + <requirement type="package" version="3.4.0">gdal</requirement> + <requirement type="package" version="6.0">pyyaml</requirement> + <requirement type="package" version="6.0">unzip</requirement> + <requirement type="package" version="1.34">tar</requirement> + </requirements> + <command detect_errors="exit_code"><![CDATA[ + mkdir -p work/data_collection work/results results_csv results_tiffs && + cp '$config_tif' config_tif.yml && + cp '$config_s2' config_s2.yml && + cp '$config_ls8' config_ls8.yml && + cp '$user_config' user_config.yml && + cp '${input.extra_files_path}'/* work/ && + cd work && + ln -s '${input_type.input_file}' '${input_type.input_file.element_identifier}' && + #if $input_type.input_file.is_of_type("zip") + unzip '$input_type.input_file' -d data_collection/ && + #elif $input_type.input_file.is_of_type("tar") + tar -xf '$input_type.input_file' -C data_collection && + #end if + #if str($input_type.platform) == 's2' + mkdir -p sentinel2_tiles_world && + (cp '$input_type.s2_shp.extra_files_path'/shapefile.shp sentinel2_tiles_world/sentinel2_tiles_world.shp || true ) && + (cp '$input_type.s2_shp.extra_files_path'/shapefile.shx sentinel2_tiles_world/sentinel2_tiles_world.shx || true ) && + (cp '$input_type.s2_shp.extra_files_path'/shapefile.dbf sentinel2_tiles_world/sentinel2_tiles_world.dbf || true ) && + (cp '$input_type.s2_shp.extra_files_path'/shapefile.prj sentinel2_tiles_world/sentinel2_tiles_world.prj || true ) && + #elif str($input_type.platform) == 'ls8' + mkdir -p WRS2_descending && + (cp '$input_type.ls8_shp.extra_files_path'/shapefile.shp WRS2_descending/WRS2_descending.shp || true ) && + (cp '$input_type.ls8_shp.extra_files_path'/shapefile.shx WRS2_descending/WRS2_descending.shx || true ) && + (cp '$input_type.ls8_shp.extra_files_path'/shapefile.dbf WRS2_descending/WRS2_descending.dbf || true ) && + (cp '$input_type.ls8_shp.extra_files_path'/shapefile.prj WRS2_descending/WRS2_descending.prj || true ) && + #end if + + eodie_process.py + --platform $input_type.platform + #if str($input_type.platform) == 'tif' + --file ${input_type.input_file.element_identifier} + #else + --dir data_collection + --out ./results + #end if + --shp shapefile + #for $i, $s in enumerate( $adv_options.statistics ) + #if str($i) == '0' + --statistics_out + --statistics + #end if + #if $s.stats_input.stats and str($s.stats_input.stats) != '': + #if str($s.stats_input.stats) == 'percentile': + '$s.stats_input.stats'_'$s.stats_input.p_value' + #else + '$s.stats_input.stats' + #end if + #end if + #end for + #if $adv_options.indices and str($adv_options.indices) != '': + --index + #for $idx in str($adv_options.indices).split(','): + '$idx' + #end for + #end if + #if
$adv_options.start_date and str($adv_options.start_date) != '': + --start '$adv_options.start_date' + #end if + #if $adv_options.end_date and str($adv_options.end_date) != '': + --end '$adv_options.end_date' + #end if + $adv_options.exclude_border + $adv_options.exclude_splitshp + --id '$identifier' && + + mv results/*.log '$logfile' && + bash '$__tool_directory__/postprocess_csv.sh' 'results' '../results_csv' && + ( mv results/*.tif ../results_tiffs/ || echo "No tiff files generated" ) && + echo "EODIE data extractor is done" + ]]> </command> + <expand macro="configfiles"/> + <inputs> + <conditional name="input_type"> + <param name="platform" type="select" label="Select platform of the input data"> + <option value="s2">Sentinel 2</option> + <option value="ls8">Landsat 8</option> + <option value="tif">tiff file</option> + </param> + <when value="tif"> + <param name='input_file' format="tiff" type="data" label="Individual input file"/> + </when> + <when value="ls8"> + <param name="input_file" type="data" format='zip,tar' label="Landsat 8 input data (zip or tarball)" /> + <param name="ls8_shp" type="data" format="shp" label="Landsat 8 tile shapefile" help="Provide the Landsat-8 tile shapefile"/> + </when> + <when value="s2"> + <param name="input_file" type="data" format='zip,tar' label="Sentinel 2 input data (zip or tarball)" /> + <param name="s2_shp" type="data" format="shp" label="Sentinel-2 tile shapefile" help="Provide the Sentinel-2 tile shapefile"/> + </when> + </conditional> + <param type="data" name="input" format="shp" label="Shapefile with polygons" help="Provide shapefile with polygons"/> + <param name="identifier" type="text" value="PlotID" label="Name of identifier" /> + <section name="adv_options" title="Advanced options" expanded="false"> + <param name="start_date" type="text" optional="true" label="Time frame start date (YYYYMMDD)"> + <validator type="length" min="8" max="8" message="Please enter a date in the form of YYYYMMDD"/> + </param> + <param name="end_date" type="text" optional="true" label="Time frame end date (YYYYMMDD)"> + <validator type="length" min="8" max="8" message="Please enter a date in the form of YYYYMMDD"/> + </param> + <repeat name="statistics" title="Compute statistics" min="0"> + <conditional name="stats_input"> + <param name="stats" label="Statistics" type="select"> + <option value="mean">Mean</option> + <option value="sum">Sum</option> + <option value="min">Minimum</option> + <option value="max">Maximum</option> + <option value="std">Standard deviation</option> + <option value="median">Median</option> + <option value="majority">Majority</option> + <option value="minority">Minority</option> + <option value="unique">Unique</option> + <option value="range">Range</option> + <option value="percentile">Percentile</option> + </param> + <when value="mean"/> + <when value="sum"/> + <when value="min"/> + <when value="max"/> + <when value="std"/> + <when value="median"/> + <when value="majority"/> + <when value="minority"/> + <when value="unique"/> + <when value="range"/> + <when value="percentile"> + <param name="p_value" type="integer" value="0" min="0" max="100" label="Percentile value" /> + </when> + </conditional> + </repeat> + <param name="indices" label="Indices to compute" type="select" multiple="true"> + <option value="ndvi">ndvi</option> + <option value="rvi">rvi</option> + <option value="savi">savi</option> + <option value="nbr">nbr</option> + <option value="kndvi">kndvi</option> + <option value="ndmi">ndmi</option> +
<option value="mndwi">mndwi</option> + <option value="evi">evi</option> + <option value="evi2">evi2</option> + <option value="dvi">dvi</option> + <option value="cvi">cvi</option> + <option value="mcar">mcar</option> + <option value="ndi45">ndi45</option> + <option value="tctb">tctb</option> + <option value="tctg">tctg</option> + <option value="tctw">tctw</option> + <option value="ndwi">ndwi</option> + <option value="B02">B02</option> + <option value="B03">B03</option> + <option value="B04">B04</option> + <option value="B05">B05</option> + <option value="B06">B06</option> + <option value="B07">B07</option> + <option value="B08">B08</option> + <option value="B8A">B8A</option> + <option value="B11">B11</option> + <option value="B12">B12</option> + </param> + <param name="exclude_splitshp" type="boolean" checked="false" label="exclude splitshp" help="Flag to indicate that splitshp has been run manually beforehand" truevalue="--exclude_splitshp" falsevalue="" /> + <param name="exclude_border" type="boolean" checked="false" label="exclude border" help="Flag to indicate that border pixels (within the polygon) should be excluded from statistics calculations / array extraction" truevalue="--exclude_border" falsevalue="" /> + <param name="geotiff_out" type="boolean" checked="false" label="generate geotiffs" help="flag to indicate that geotiffs shall be extracted" truevalue="--geotiff_out" falsevalue="" /> + </section> + </inputs> + <outputs> + <data name="logfile" format="txt"/> + <collection name="csv_files" type="list" label="${tool.name} (statistics csv outputs)"> + <discover_datasets pattern="__name__" directory="results_csv" visible="false" format="tabular"/> + </collection> + </outputs> + <tests> + <test> + <conditional name="input_type"> + <param name="platform" value="tif" /> + <param name="input_file" ftype="tiff" value="smaller_area_20100401.tif" /> + </conditional> + <param name="input" value="test_polygons/test_polygons.html" ftype="shp"> + <composite_data value="test_polygons/test_polygons.shp"/> + <composite_data value="test_polygons/test_polygons.shx"/> + <composite_data value="test_polygons/test_polygons.dbf"/> + <composite_data value="test_polygons/test_polygons.prj"/> + </param> + <param name="identifier" value="id"/> + <section name="adv_options"> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="mean"/> + </conditional> + </repeat> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="percentile"/> + <param name="p_value" value="10"/> + </conditional> + </repeat> + <param name="exclude_splitshp" value="true"/> + </section> + <output file="20211213-104427.log" name="logfile" compare="sim_size" delta="100"/> + <output_collection name="csv_files" type="list" count="1"> + <element name="testrgb_20100401__statistics.csv" ftype="tabular" file="testrgb_20100401__statistics.csv" compare="diff" lines_diff="1"> + <assert_contents> + <has_text text="percentile_10" /> + <has_text text="20508" /> + <has_text text="12049" /> + <has_text text="4508" /> + <has_text text="110." /> + <has_text text="147." /> + <has_text text="32." 
/> + <has_n_columns n="4" /> + </assert_contents> + </element> + + + </output_collection> + </test> + <test> + <conditional name="input_type"> + <param name="platform" value="ls8" /> + <param name="input_file" ftype="tar" value="LS8.tar" /> + <param name="ls8_shp" ftype="shp" value="WRS2_descending/WRS2_descending.html"> + <composite_data value="WRS2_descending/WRS2_descending.shp"/> + <composite_data value="WRS2_descending/WRS2_descending.shx"/> + <composite_data value="WRS2_descending/WRS2_descending.dbf"/> + <composite_data value="WRS2_descending/WRS2_descending.prj"/> + </param> + </conditional> + <param name="input" value="test_parcels_32635/test_parcels_32635.html" ftype="shp"> + <composite_data value="test_parcels_32635/test_parcels_32635.shp"/> + <composite_data value="test_parcels_32635/test_parcels_32635.shx"/> + <composite_data value="test_parcels_32635/test_parcels_32635.dbf"/> + <composite_data value="test_parcels_32635/test_parcels_32635.prj"/> + </param> + <param name="identifier" value="ID"/> + <section name="adv_options"> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="mean"/> + </conditional> + </repeat> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="median"/> + </conditional> + </repeat> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="std"/> + </conditional> + </repeat> + <param name="indices" value="ndvi"/> + </section> + <output file="20211218-102629.log" name="logfile" compare="sim_size" delta="100"/> + <output_collection name="csv_files" type="list" count="1"> + <element name="ndvi_20190628_189017_statistics.csv" ftype="tabular" file="ndvi_20190628_189017_statistics.csv" compare="diff" lines_diff="1"> + <assert_contents> + <has_text text="count" /> + <has_text text="32" /> + <has_text text="81" /> + <has_text text="48" /> + <has_text text=".384" /> + <has_text text=".304" /> + <has_text text=".424" /> + <has_text text=".387" /> + <has_text text=".295" /> + <has_text text=".432" /> + <has_text text=".033" /> + <has_text text=".078" /> + <has_text text=".031" /> + <has_n_columns n="5" /> + </assert_contents> + </element> + </output_collection> + </test> + <test> + <conditional name="input_type"> + <param name="platform" value="s2" /> + <param name="input_file" ftype="tar" value="S2.tar" /> + <param name="s2_shp" ftype="shp" value="sentinel2_tiles_world/sentinel2_tiles_world.html"> + <composite_data value="sentinel2_tiles_world/sentinel2_tiles_world.shp"/> + <composite_data value="sentinel2_tiles_world/sentinel2_tiles_world.shx"/> + <composite_data value="sentinel2_tiles_world/sentinel2_tiles_world.dbf"/> + <composite_data value="sentinel2_tiles_world/sentinel2_tiles_world.prj"/> + </param> + </conditional> + <param name="input" value="test_parcels_32635/test_parcels_32635.html" ftype="shp"> + <composite_data value="test_parcels_32635/test_parcels_32635.shp"/> + <composite_data value="test_parcels_32635/test_parcels_32635.shx"/> + <composite_data value="test_parcels_32635/test_parcels_32635.dbf"/> + <composite_data value="test_parcels_32635/test_parcels_32635.prj"/> + </param> + <param name="identifier" value="ID"/> + <section name="adv_options"> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="mean"/> + </conditional> + </repeat> + <repeat name="statistics"> + <conditional name="stats_input"> + <param name="stats" value="median"/> + </conditional> + </repeat> + <repeat name="statistics"> + 
<conditional name="stats_input"> + <param name="stats" value="std"/> + </conditional> + </repeat> + <param name="indices" value="ndvi"/> + </section> + <output file="20211217-210343.log" name="logfile" compare="sim_size" delta="100"/> + <output_collection name="csv_files" type="list" count="1"> + <element name="ndvi_20200626_34VFN_statistics.csv" ftype="tabular" file="ndvi_20200626_34VFN_statistics.csv" compare="diff" lines_diff="1"> + <assert_contents> + <has_text text="204" /> + <has_text text="551" /> + <has_text text="335" /> + <has_text text=".321" /> + <has_text text=".293" /> + <has_text text=".661" /> + <has_text text=".302" /> + <has_text text=".294" /> + <has_text text=".686" /> + <has_text text=".091" /> + <has_text text=".088" /> + <has_text text=".091" /> + <has_n_columns n="5" /> + </assert_contents> + </element> + </output_collection> + </test> + </tests> + <help><![CDATA[ + +**EODIE - Earth Observation Data Information Extractor** +======================================================================================================= + +Toolkit to extract object-based time series from Earth Observation data such as Copernicus Sentinel-2, Landsat 8 and, more generally, GeoTIFF files. + +EODIE takes as input the objects of interest as polygons in a shapefile, the timeframe of interest and the features (e.g. vegetation indices) to be extracted. +The output is a per-polygon time series of the selected features over the timeframe of interest. + + ]]> </help> + <expand macro="citations" /> +</tool>
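The --statistics_out/--statistics options request classic per-polygon zonal statistics. As a rough sketch of that computation (not EODIE's actual code path), rasterstats, one of the declared requirements, can produce the same count/mean/percentile_10 columns that the first test asserts on; the file names below are taken from that test:

    # Zonal-statistics sketch with rasterstats (a declared requirement).
    # Not EODIE's internal code; file names follow the first test case.
    from rasterstats import zonal_stats

    stats = zonal_stats(
        "test_polygons/test_polygons.shp",          # polygons carrying the 'id' attribute
        "smaller_area_20100401.tif",                # band 1 of the test GeoTIFF
        stats=["count", "mean", "percentile_10"],   # percentile_<p> mirrors the stats/p_value concatenation
    )
    for row in stats:
        print(row["count"], row["mean"], row["percentile_10"])

rasterstats accepts percentile_<q> statistic names directly, which is why the wrapper can simply join the selected statistic with the chosen p_value.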
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/macros.xml Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,223 @@ +<macros> + <token name="@VERSION@">1.0.2</token> + <xml name="edam_ontology"> + <edam_topics> + <edam_topic>topic_0610</edam_topic> + <edam_topic>topic_3050</edam_topic> + </edam_topics> + </xml> + <xml name="configfiles"> + <configfiles> + <configfile name="config_tif"><![CDATA[ +platform: tif + +##the first that is found is used +datepattern: '20[1-2][0-9][0-1][0-9][0-3][0-9]' + +##name to write into output statistics filename +name: 'testrgb' + ]]> </configfile> + <configfile name="config_s2"><![CDATA[ +#### + +## Configuration file for the use of Sentinel-2 ## + +## Know what you do before you change anything here ## + +#### + +platform: 's2' + +## Mask conditions are stored as values, not as bitflags +bitmask: 0 +## Values to be to be included in cloudmask +## default [9,8,3,10,0,1] +tobemaskedlist: + - 9 ## cloud high probability + - 8 ## cloud medium probability + - 3 ## cloud shadow + - 10 ## cirrus + - 0 ## no data + - 1 ## saturated and defective + +##pattern to find all data files +filepattern: 'S2[A-C]_MSIL2A_\d{8}T\d{6}_N\d{4}_R\d{3}_T\d{2}[A-Z]{3}_\d{8}T\d{6}.SAFE$' + +## Sentinel-2 bands +red : 'B04' +green: 'B03' +blue: 'B02' +nir: 'B08' +r_edge: 'B05' +swir1: 'B11' +swir2: 'B12' + +## some part of the products file name that identifies them as product to be used +productnameidentifier: 'S2*.SAFE' + +## parts to build path towards bands +bandlocation: ['.','*','*','IMG_DATA'] + +## path building set after imgpath to get the bandfile +pathbuildinglist: ['R', 'pixelsize' , 'm','*', 'bandname', '_' , 'pixelsize' ,'m.jp2'] + +##indicator for cloudfile +cloudfilename: 'SCL' + +tilepattern: '(?<=T)[0-9]{2}[A-Z]{3}' + +##the first that is found is used +datepattern: '20[1-2][0-9][0-1][0-9][0-3][0-9]' + +band_designation: 'B[0-1]?\dA?' 
+ +## Quantification value used to multiple the reflectance to get DN +## This could be read from metadata but for now at least is here +quantification_value: 10000 + +## available resolutions per band +B01: [60] +B02: [10,20,60] +B03: [10,20,60] +B04: [10,20,60] +B05: [20,60] +B06: [20,60] +B07: [20,60] +B08: [10] +B8A: [20,60] +B09: [60] +B11: [20,60] +B12: [20,60] +SCL: [20,60] +AOT: [10,20,60] +TCI: [10,20,60] +WVP: [10,20,60] + ]]> </configfile> + <configfile name="config_ls8"><![CDATA[ +platform: 'ls8' + +## Process only files with less than xx % cloudcover +maxcloudcover: 10 + + +## Mask conditions are not stored as values, but as individual 0/1 flags in bits +bitmask: 1 +## Bits to be to be included in cloudmask +## https://prd-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/atoms/files/LSDS-1328_Landsat8-9-OLI-TIRS-C2-L2-DFCB-v6.pdf +tobemaskedlist: + - 0 ## Fill data + - 1 ## Dilated cloud + - 2 ## Cirrus + - 3 ## Cloud + - 4 ## Cloud shadow + - 5 ## Snow + - 9 ## Cloud medium to high confidence + - 11 ## Cloud shadow high confidence + - 13 ## Snow/Ice high confidence + - 15 ## Cirrus high confidence + + +##pattern to find all data files +filepattern: 'LC08_L\d.._\d{6}_\d{8}_\d{8}_02_[TR][12T]$' + +##all bands are located in parent directory directly +bandlocation: ['.'] + +##indicator for cloudfile +cloudfilename: 'QA_PIXEL' + +## LS8 bands +## https://www.usgs.gov/media/images/landsat-8-band-designations +red : 'B4' +green: 'B3' +blue: 'B2' +nir: 'B5' +swir1: 'B6' +swir2: 'B7' + +tilepattern: '[0-9]{6}' + +## the first that is found is used +datepattern: '20[1-2][0-9][0-1][0-9][0-3][0-9]' + +band_designation: 'B\d?\d' + +##??????????? +## Quantification value used to multiple the reflectance to get DN +## This could be read from metadata but for now at least is here +quantification_value: 65535 + +## available resolutions per band +B1: [30] +B2: [30] +B3: [30] +B4: [30] +B5: [30] +B6: [30] +B7: [30] +QA_PIXEL: [30] +## to be continued + +##upsampling (converting to higher resolution/smaller cells) / downsampling (converting to lower resolution/larger cellsize) +##available: ‘nearest’, ‘bilinear’, ‘cubic’, ‘cubic_spline’, ‘lanczos’, ‘average’, ‘mode’, and ‘gauss’, +## from https://rasterio.readthedocs.io/en/latest/api/rasterio.enums.html#rasterio.enums.Resampling + +resampling_method: 'bilinear' + + +## path building set *after inpath* to get the bandfile + +pathbuildinglist: ['*', 'bandname', '*' , '.TIF'] + ]]> </configfile> + <configfile name="user_config"><![CDATA[ +#### + +## Configuration file for adjusting the process/results and give paths ## + +#### + +## Process only files with less than xx % cloudcover +maxcloudcover: 99 + +## Extract files with xx m pixel size +## options: 10,20 (for Sentinel-2) +## options: 30 (for Landsat8) + +#if str($input_type.platform) == 'ls8' +pixelsize: 30 +#else +pixelsize: 10 +#end if + +##resampling (converting to higher/lower resolution/smaller cells) +##available: 'biliner','nearest','cubic','average', ... 
+## from https://rasterio.readthedocs.io/en/latest/api/rasterio.enums.html#rasterio.enums.Resampling + +resampling_method: 'bilinear' + +##location of the shapefile with tiles (Sentinel2-tiles-world in case of Sentinel-2, WRS2_descending in case of Landsat 8) +#if str($input_type.platform) == 's2' +tileshp: './sentinel2_tiles_world/sentinel2_tiles_world' +#elif str($input_type.platform) == 'ls8' +tileshp: './WRS2_descending/WRS2_descending' +#end if + +##name of field where tilenames are stored in shapefile ('Name' in case of sentinel2_tiles_world, 'PR' in case of WRS2_descending ) +#if str($input_type.platform) == 'ls8' +fieldname: 'PR' +#else +fieldname: 'Name' +#end if + +## Lookup table for storing the tiles and the IDs they include for every tile processed by EODIE with --array_out +## Table needed for array plotting +lookup: './postprocesses/lookuptable.txt' + ]]> </configfile> + </configfiles> + </xml> + <xml name="citations"> + <citations> + <citation type="doi">10.5281/zenodo.4762323</citation> + </citations> + </xml> +</macros>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/postprocess_csv.sh Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +WORKDIR=$1 +RESDIR=$2 + +echo "====================================================================" +echo " Convert EODIE csv to tabular " +echo "====================================================================" + +nb_csv=$(find "$WORKDIR" -type f -name "*.csv" | wc -l) +echo "Number of csv files to convert: $nb_csv" +if [[ $nb_csv -gt 0 ]]; then + echo "Start" + for infile in "$WORKDIR"/*.csv; do + echo "processing $infile" + sed -i.bak -e "s/,/\t/g" "$infile" + mv "$infile" "$RESDIR"/. + done +fi + +echo "EODIE tabular files saved."
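The sed call above rewrites every comma, which is fine for the purely numeric statistics files EODIE writes. An equivalent sketch with Python's csv module (an alternative shown only for reference, not what the wrapper runs) would also survive quoted fields that contain commas:

    # CSV -> TSV conversion sketch, equivalent to postprocess_csv.sh.
    import csv
    import sys
    from pathlib import Path

    workdir, resdir = Path(sys.argv[1]), Path(sys.argv[2])
    resdir.mkdir(parents=True, exist_ok=True)
    for infile in sorted(workdir.glob("*.csv")):
        with infile.open(newline="") as src, (resdir / infile.name).open("w", newline="") as dst:
            csv.writer(dst, delimiter="\t").writerows(csv.reader(src))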
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/20211213-104427.log Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,6 @@ +INFO:root:All inputs for this process: dict_items([('platform', 'tif'), ('config', {'platform': 'tif', 'datepattern': '20[1-2][0-9][0-1][0-9][0-3][0-9]', 'name': 'testrgb', 'maxcloudcover': 99, 'pixelsize': 10, 'resampling_method': 'bilinear', 'fieldname': 'Name', 'lookup': './postprocesses/lookuptable.txt'}), ('mydir', None), ('myfile', 'smaller_area_20100401.tif'), ('input', ['smaller_area_20100401.tif']), ('shpbase', 'shapefile'), ('outpath', './results'), ('idname', 'id'), ('statistics_out', True), ('array_out', False), ('indexlist', None), ('statistics', ['count', 'mean', 'percentile_10']), ('startdate', '20160101'), ('enddate', '20211221'), ('keep_shp', False), ('geotiff_out', False), ('test', False), ('exclude_border', False), ('extmask', None), ('exclude_splitshp', True), ('verbose', False), ('format', ['statistics'])]) +INFO:root:File to be processed smaller_area_20100401.tif +INFO:root:Checking the projection of the inputfile now +INFO:root:Reprojectcommand: ogr2ogr -t_srs EPSG:3067 shapefile_reprojected_3067.shp shapefile.shp +INFO:root:input shapefile had other than EPSG 3067 but was reprojected and works now +INFO:root:stat to csv in: ./results/testrgb_20100401__statistics.csv
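The Reprojectcommand recorded in this log is a plain ogr2ogr call, so the same reprojection can be reproduced outside EODIE. A sketch that mirrors it via subprocess (gdal, which provides ogr2ogr, is a declared requirement; file names are copied from the log):

    # Mirrors the ogr2ogr reprojection shown in the log above.
    import subprocess

    subprocess.run(
        ["ogr2ogr", "-t_srs", "EPSG:3067",
         "shapefile_reprojected_3067.shp",   # destination, as in the log
         "shapefile.shp"],                   # source shapefile copied into the work directory
        check=True,
    )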
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/20211217-210343.log Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,21 @@ +INFO:root:All inputs for this process: dict_items([('platform', 's2'), ('config', {'platform': 's2', 'bitmask': 0, 'tobemaskedlist': [9, 8, 3, 10, 0, 1], 'filepattern': 'S2[A-C]_MSIL2A_\\d{8}T\\d{6}_N\\d{4}_R\\d{3}_T\\d{2}[A-Z]{3}_\\d{8}T\\d{6}.SAFE$', 'red': 'B04', 'green': 'B03', 'blue': 'B02', 'nir': 'B08', 'r_edge': 'B05', 'swir1': 'B11', 'swir2': 'B12', 'productnameidentifier': 'S2*.SAFE', 'bandlocation': ['.', '*', '*', 'IMG_DATA'], 'pathbuildinglist': ['R', 'pixelsize', 'm', '*', 'bandname', '_', 'pixelsize', 'm.jp2'], 'cloudfilename': 'SCL', 'tilepattern': '(?<=T)[0-9]{2}[A-Z]{3}', 'datepattern': '20[1-2][0-9][0-1][0-9][0-3][0-9]', 'band_designation': 'B[0-1]?\\dA?', 'quantification_value': 10000, 'B01': [60], 'B02': [10, 20, 60], 'B03': [10, 20, 60], 'B04': [10, 20, 60], 'B05': [20, 60], 'B06': [20, 60], 'B07': [20, 60], 'B08': [10], 'B8A': [20, 60], 'B09': [60], 'B11': [20, 60], 'B12': [20, 60], 'SCL': [20, 60], 'AOT': [10, 20, 60], 'TCI': [10, 20, 60], 'WVP': [10, 20, 60], 'maxcloudcover': 99, 'pixelsize': 10, 'resampling_method': 'bilinear', 'tileshp': './sentinel2_tiles_world/sentinel2_tiles_world', 'fieldname': 'Name', 'lookup': './postprocesses/lookuptable.txt'}), ('mydir', 'data_collection'), ('myfile', None), ('input', ['data_collection/S2B_MSIL2A_20200626T095029_N0214_R079_T34VFN_20200626T123234.SAFE']), ('shpbase', 'shapefile'), ('outpath', './results'), ('idname', 'ID'), ('statistics_out', True), ('array_out', False), ('indexlist', ['ndvi']), ('statistics', ['count', 'mean', 'median', 'std']), ('startdate', '20160101'), ('enddate', '20211221'), ('keep_shp', False), ('geotiff_out', False), ('test', False), ('exclude_border', False), ('extmask', None), ('exclude_splitshp', False), ('verbose', False), ('format', ['statistics'])]) +INFO:root:checking the projection of the inputfile now +INFO:root:input shapefile had other than EPSG 4326 but was reprojected and works now +INFO:root:checking the projection of the inputfile now +INFO:root:input shapefile had other than EPSG 4326 but was reprojected and works now +INFO:root:checking the projection of the inputfile now +INFO:root:input shapefile had other than EPSG 4326 but was reprojected and works now +INFO:root:number of usable cores for shapesplitting is 2 +INFO:root:splitted shapefiles now exist +INFO:root:deleted splitted worldtiles +INFO:root:Imagepath is data_collection/S2B_MSIL2A_20200626T095029_N0214_R079_T34VFN_20200626T123234.SAFE/./GRANULE/L2A_T34VFN_A017265_20200626T095032/IMG_DATA +INFO:root:Tile is 34VFN +INFO:root:Date is 20200626 +INFO:root:Shape of cloudmask is (10980, 10980) +INFO:root:Checking the projection of the inputfile now +INFO:root:Reprojectcommand: ogr2ogr -t_srs EPSG:32634 EODIE_temp_shp/shapefile_34VFN_reprojected_32634.shp EODIE_temp_shp/shapefile_reprojected_4326_34VFN.shp +INFO:root:input shapefile had other than EPSG 32634 but was reprojected and works now +INFO:root:Cloudcover below 99: True +INFO:root:Data withing area of interest: True +INFO:root:stat to csv in: ./results/ndvi_20200626_34VFN_statistics.csv +INFO:root:deleted splitted shapefiles
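The Tile and Date lines in this log come from applying the regular expressions defined in config_s2 (tilepattern and datepattern) to the .SAFE product name. A small re sketch with the product from this run (the patterns are copied from the config; the snippet itself is illustrative, not EODIE's parser):

    import re

    product = "S2B_MSIL2A_20200626T095029_N0214_R079_T34VFN_20200626T123234.SAFE"
    tilepattern = r"(?<=T)[0-9]{2}[A-Z]{3}"
    datepattern = r"20[1-2][0-9][0-1][0-9][0-3][0-9]"

    print(re.search(tilepattern, product).group(0))  # 34VFN
    print(re.search(datepattern, product).group(0))  # 20200626 (the first match found is used, as the config notes)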
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/20211218-102629.log Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,19 @@ +INFO:root:All inputs for this process: dict_items([('platform', 'ls8'), ('config', {'platform': 'ls8', 'maxcloudcover': 99, 'bitmask': 1, 'tobemaskedlist': [0, 1, 2, 3, 4, 5, 9, 11, 13, 15], 'filepattern': 'LC08_L\\d.._\\d{6}_\\d{8}_\\d{8}_02_[TR][12T]$', 'bandlocation': ['.'], 'cloudfilename': 'QA_PIXEL', 'red': 'B4', 'green': 'B3', 'blue': 'B2', 'nir': 'B5', 'swir1': 'B6', 'swir2': 'B7', 'tilepattern': '[0-9]{6}', 'datepattern': '20[1-2][0-9][0-1][0-9][0-3][0-9]', 'band_designation': 'B\\d?\\d', 'quantification_value': 65535, 'B1': [30], 'B2': [30], 'B3': [30], 'B4': [30], 'B5': [30], 'B6': [30], 'B7': [30], 'QA_PIXEL': [30], 'resampling_method': 'bilinear', 'pathbuildinglist': ['*', 'bandname', '*', '.TIF'], 'pixelsize': 30, 'tileshp': './WRS2_descending/WRS2_descending', 'fieldname': 'PR', 'lookup': './postprocesses/lookuptable.txt'}), ('mydir', 'data_collection'), ('myfile', None), ('input', ['data_collection/LC08_L2SP_189017_20190628_20200827_02_T1']), ('shpbase', 'shapefile'), ('outpath', './results'), ('idname', 'ID'), ('statistics_out', True), ('array_out', False), ('indexlist', ['ndvi']), ('statistics', ['count', 'mean', 'median', 'std']), ('startdate', '20160101'), ('enddate', '20211221'), ('keep_shp', False), ('geotiff_out', False), ('test', False), ('exclude_border', False), ('extmask', None), ('exclude_splitshp', False), ('verbose', False), ('format', ['statistics'])]) +INFO:root:checking the projection of the inputfile now +INFO:root:input shapefile had other than EPSG 4326 but was reprojected and works now +INFO:root:checking the projection of the inputfile now +INFO:root:input shapefile had other than EPSG 4326 but was reprojected and works now +INFO:root:checking the projection of the inputfile now +INFO:root:input shapefile had other than EPSG 4326 but was reprojected and works now +INFO:root:number of usable cores for shapesplitting is 2 +INFO:root:splitted shapefiles now exist +INFO:root:deleted splitted worldtiles +INFO:root:Imagepath is data_collection/LC08_L2SP_189017_20190628_20200827_02_T1/. +INFO:root:Tile is 189017 +INFO:root:Date is 20190628 +INFO:root:Shape of cloudmask is (40, 27) +INFO:root:Checking the projection of the inputfile now +INFO:root:Reprojectcommand: ogr2ogr -t_srs EPSG:32635 EODIE_temp_shp/shapefile_189017_reprojected_32635.shp EODIE_temp_shp/shapefile_reprojected_4326_189017.shp +INFO:root:input shapefile had other than EPSG 32635 but was reprojected and works now +INFO:root:stat to csv in: ./results/ndvi_20190628_189017_statistics.csv +INFO:root:deleted splitted shapefiles
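The ndvi values asserted in these tests reduce to the usual (nir - red) / (nir + red) on the bands named in the configs (B08/B04 for Sentinel-2, B5/B4 for Landsat 8), with digital numbers scaled by quantification_value. A numpy sketch of that formula (illustrative only, not EODIE's index module; note the scaling cancels in a ratio index but matters for non-ratio indices):

    import numpy as np

    def ndvi(nir, red, quantification_value=10000):
        # DN -> reflectance, then (nir - red) / (nir + red)
        nir = nir.astype("float32") / quantification_value
        red = red.astype("float32") / quantification_value
        return (nir - red) / (nir + red)

    red = np.array([[800, 1200]], dtype="uint16")   # e.g. Sentinel-2 B04 digital numbers
    nir = np.array([[3200, 2400]], dtype="uint16")  # e.g. Sentinel-2 B08 digital numbers
    print(ndvi(nir=nir, red=red))                   # approx. [[0.6 0.333]]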
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/WRS2_descending.html Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,9 @@ +<html><head><title>Shapefile Galaxy Composite Dataset</title></head><p/> +<div>This composite dataset is composed of the following files:<p/><ul> +<li><a href="WRS2_descending.shp" type="application/binary">shapefile.shp (Geometry File (shp))</a></li> +<li><a href="WRS2_descending.shx" type="application/binary">shapefile.shx (Geometry index File (shx))</a></li> +<li><a href="WRS2_descending.dbf" type="application/binary">shapefile.dbf (Columnar attributes for each shape (dbf))</a></li> +<li><a href="WRS2_descending.prj" type="application/binary">shapefile.prj (Projection description (prj))</a> (optional)</li> +<li><a href="WRS2_descending.sbn" type="application/binary">shapefile.sbn (Spatial index of the features (sbn))</a> (optional)</li> +<li><a href="WRS2_descending.sbx" type="application/binary">shapefile.sbx (Spatial index of the features (sbx))</a> (optional)</li> +</ul></div></html>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/WRS2_descending/WRS2_descending.prj Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file
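This WRS2_descending layer is the tile grid the wrapper copies into WRS2_descending/ for the ls8 branch; user_config.yml then points tileshp at it and reads the path/row from the 'PR' field ('Name' for the Sentinel-2 grid). A fiona sketch of that lookup (fiona 1.8 is the pinned requirement; the exact attribute formatting in the shapefile is an assumption here):

    # Look up the footprint of one Landsat tile in the WRS2_descending shapefile.
    import fiona

    with fiona.open("WRS2_descending/WRS2_descending.shp") as tiles:
        for feature in tiles:
            if str(feature["properties"]["PR"]) == "189017":  # path/row from the Landsat 8 test log
                print(feature["geometry"]["type"])
                break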
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/ndvi_20190628_189017_statistics.csv Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,4 @@ +id count mean median std +0 32 0.384 0.387 0.033 +1 81 0.304 0.295 0.078 +2 48 0.424 0.432 0.031
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/ndvi_20200626_34VFN_statistics.csv Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,4 @@ +id count mean median std +0 204 0.321 0.302 0.091 +1 551 0.293 0.294 0.088 +2 335 0.661 0.686 0.091
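Each run writes one such table per index, date and tile, so the per-polygon time series promised in the help text is obtained by collecting these outputs over several dates. A standard-library sketch of that post-processing step on files shaped like the two above (the date-in-filename convention is taken from the output names; this is not part of the tool):

    # Stack several EODIE statistics tables into a per-polygon time series of the mean.
    import csv
    from collections import defaultdict
    from pathlib import Path

    series = defaultdict(dict)  # polygon id -> {date: mean}
    for path in sorted(Path("results_csv").glob("ndvi_*_statistics.csv")):
        date = path.name.split("_")[1]
        with path.open(newline="") as handle:
            for row in csv.DictReader(handle, delimiter="\t"):
                series[row["id"]][date] = float(row["mean"])

    for polygon_id, values in series.items():
        print(polygon_id, values)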
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/sentinel2_tiles_world.html Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,7 @@ +<html><head><title>Shapefile Galaxy Composite Dataset</title></head><p/> +<div>This composite dataset is composed of the following files:<p/><ul> +<li><a href="sentinel2_tiles_world.shp" type="application/binary">shapefile.shp (Geometry File (shp))</a></li> +<li><a href="sentinel2_tiles_world.shx" type="application/binary">shapefile.shx (Geometry index File (shx))</a></li> +<li><a href="sentinel2_tiles_world.dbf" type="application/binary">shapefile.dbf (Columnar attributes for each shape (dbf))</a></li> +<li><a href="sentinel2_tiles_world.prj" type="application/binary">shapefile.prj (Projection description (prj))</a> (optional)</li> +</ul></div></html>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/sentinel2_tiles_world/COPYING.txt Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,3 @@ +© Copernicus Sentinel data 2016 downloaded from https://scihub.copernicus.eu/ + +Read the TERMS AND CONDITIONS \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/sentinel2_tiles_world/readme.txt Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,3 @@ +© Copernicus Sentinel data 2016 downloaded from https://scihub.copernicus.eu/ + +Sentinel-2 tiling grid adapted from the kml file downloaded from ESA (https://sentinel.esa.int/web/sentinel/missions/sentinel-2/data-products) at the link https://sentinel.esa.int/documents/247904/1955685/S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00.kml \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/sentinel2_tiles_world/sentinel2_tiles_world.prj Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/sentinel2_tiles_world/sentinel2_tiles_world.qpj Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,1 @@ +GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/test_parcels_32635.html Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,7 @@ +<html><head><title>Shapefile Galaxy Composite Dataset</title></head><p/> +<div>This composite dataset is composed of the following files:<p/><ul> +<li><a href="test_parcels_32635.shp" type="application/binary">shapefile.shp (Geometry File (shp))</a></li> +<li><a href="test_parcels_32635.shx" type="application/binary">shapefile.shx (Geometry index File (shx))</a></li> +<li><a href="test_parcels_32635.dbf" type="application/binary">shapefile.dbf (Columnar attributes for each shape (dbf))</a></li> +<li><a href="test_parcels_32635.prj" type="application/binary">shapefile.prj (Projection description (prj))</a> (optional)</li> +</ul></div></html>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/test_parcels_32635/test_parcels_32635.cpg Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,1 @@ +UTF-8 \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/test_parcels_32635/test_parcels_32635.prj Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,1 @@ +PROJCS["WGS_1984_UTM_Zone_35N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",27.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]] \ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/test_polygons.html Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,7 @@ +<html><head><title>Shapefile Galaxy Composite Dataset</title></head><p/> +<div>This composite dataset is composed of the following files:<p/><ul> +<li><a href="test_polygons.shp" type="application/binary">shapefile.shp (Geometry File (shp))</a></li> +<li><a href="test_polygons.shx" type="application/binary">shapefile.shx (Geometry index File (shx))</a></li> +<li><a href="test_polygons.dbf" type="application/binary">shapefile.dbf (Columnar attributes for each shape (dbf))</a></li> +<li><a href="test_polygons.prj" type="application/binary">shapefile.prj (Projection description (prj))</a> (optional)</li> +</ul></div></html>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test-data/test_polygons/test_polygons.prj Thu Dec 30 15:24:09 2021 +0000 @@ -0,0 +1,1 @@ +PROJCS["EUREF_FIN_TM35FIN",GEOGCS["GCS_ETRS_1989",DATUM["D_ETRS_1989",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",27.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]] \ No newline at end of file