Commit message:
planemo upload for repository https://github.com/AquaINFRA/tools-ecology/tree/master commit 6db8e8425f0525fc2e5df8cb43beb3b14024d0ab

added:
aquainfra_ogc_api_processes.R
aquainfra_ogc_api_processes.xml
macros.xml
test-data/points_att_polygon_test_input_1.txt
test-data/points_att_polygon_test_input_2.txt
test-data/points_att_polygon_test_input_3.txt
diff -r 000000000000 -r 0077885b6f1d aquainfra_ogc_api_processes.R
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/aquainfra_ogc_api_processes.R	Mon Oct 14 12:23:01 2024 +0000
@@ -0,0 +1,329 @@
+library("httr2")
+library("jsonlite")
+library("getopt")
+
+cat("start generic wrapper service \n")
+
+remove_null_values <- function(x) {
+  # Check if the input is a list
+  if (is.list(x)) {
+    # Remove NULL values and apply the function recursively to sublists
+    x <- lapply(x, remove_null_values)
+    x <- x[!sapply(x, is.null)]
+  }
+  return(x)
+}
+
+getParameters <- function() {
+  con <- file("inputs.json", "r")
+  lines <- readLines(con)
+  close(con)
+
+  json_string <- paste(lines, collapse = "\n")
+  json_data <- fromJSON(json_string)
+
+  # Remove NULL values from json_data
+  cleaned_json_data <- remove_null_values(json_data)
+  return(cleaned_json_data$conditional_process)
+}
+
+parseResponseBody <- function(body) {
+  hex <- c(body)
+  intValues <- as.integer(hex)
+  rawVector <- as.raw(intValues)
+  readableOutput <- rawToChar(rawVector)
+  jsonObject <- jsonlite::fromJSON(readableOutput)
+  return(jsonObject)
+}
+
+getOutputs <- function(inputs, output, server) {
+  url <- paste(paste(server, "/processes/", sep = ""),
+               inputs$select_process,
+               sep = "")
+  request <- request(url)
+  response <- req_perform(request)
+  responseBody <- parseResponseBody(response$body)
+  outputs <- list()
+
+  for (x in 1:length(responseBody$outputs)) {
+    outputformatName <- paste(names(responseBody$outputs[x]), "_outformat", sep = "")
+    output_item <- list()
+
+    for (p in names(inputs)) {
+      if (p == outputformatName) {
+        format <- list("mediaType" = inputs[[outputformatName]])
+        output_item$format <- format
+      }
+    }
+    output_item$transmissionMode <- "reference"
+    outputs[[x]] <- output_item
+  }
+
+  names(outputs) <- names(responseBody$outputs)
+  return(outputs)
+}
+
+executeProcess <- function(url, process, requestBodyData) {
+  url <- paste(paste(paste(url, "processes/", sep = ""), process, sep = ""),
+               "/execution", sep = "")
+  requestBodyData$inputs$select_process <- NULL
+
+  body <- list()
+  body$inputs <- requestBodyData$inputs
+
+  response <- request(url) %>%
+    req_headers("Content-Type" = "application/json",
+                "Prefer" = "respond-async") %>%
+    req_body_json(body) %>%
+    req_perform()
+
+  cat("\n Process executed")
+  cat("\n status: ", response$status_code)
+  #if ( process == "barplot-trend-results") {
+  #  process = "batplot-trend-results"
+  #}
+  #href <- parseResponseBody(response$body)$outputs[[gsub("-", "_", process)]]$href
+  jobId <- parseResponseBody(response$body)$jobID
+
+  return(jobId)
+}
+
+checkJobStatus <- function(server, process, jobID) {
+  url <- paste0(server, "jobs/", jobID)
+  response <- request(url) %>%
+    req_perform()
+  jobStatus <- parseResponseBody(response$body)$status
+  jobProgress <- parseResponseBody(response$body)$progress
+  return(jobStatus)
+}
+
+getStatusCode <- function(server, process, jobID) {
+  url <- paste0(server, "jobs/", jobID)
+  print(url)
+  response <- request(url) %>%
+    req_perform()
+  status_code <- response$status_code
+  return(status_code)
+}
+
+getResult <- function(server, process, jobID) {
+  response <- request(paste0(server, "jobs/", jobID, "/results?f=json")) %>%
+    req_perform()
+  return(response)
+}
+
+# Recursive function to search for href in a nested list
+findHref <- function(obj) {
+  hrefs <- c() # Initialize an empty vector to store hrefs
+
+  if (is.list(obj)) {
+    # If the object is a list, loop through its elements
+    for (name in names(obj)) {
+      element <- obj[[name]]
+
+      if (is.list(element)) {
+        # Recursively search if the element is another list
+        hrefs <- c(hrefs, findHref(element))
+      } else if (name == "href") {
+        # If the element has a name "href", capture its value
+        hrefs <- c(hrefs, element)
+      }
+    }
+  }
+  return(hrefs)
+}
+
+retrieveResults <- function(server, process, jobID, outputData) {
+  status_code <

[... diff truncated by the changeset viewer; it resumes inside the input-conversion loop below ...]

+      con <- file(inputParameters[[key]], "r")
+      url_list <- list()
+      #while (length(line <- readLines(con, n = 1)) > 0) {
+      #  if (is_url(line)) {
+      #    url_list <- c(url_list, list(list(href = trimws(line))))
+      #  }
+      #}
+      con <- file(inputParameters[[key]], "r")
+      lines <- readLines(con)
+      print("--------------------------------------------------------------------1")
+      print(length(lines))
+      close(con)
+      if (!length(lines) > 1 && endsWith(lines, ".jp2") && startsWith(lines, "https")) {
+        print("--------------------------------------------------------------------2")
+        tmp <- list()
+        tmp$href <- lines
+        tmp$type <- "image/jp2"
+        inputParameters[[key]] <- tmp
+      }
+      else if (!length(lines) > 1 && endsWith(lines, ".zip") && startsWith(lines, "https")) {
+        print("--------------------------------------------------------------------3")
+        json_string <- paste(lines, collapse = "\n")
+        inputParameters[[key]] <- json_string
+      } else if (!length(lines) > 1 && (endsWith(lines, ".xlsx") || endsWith(lines, ".csv") || grepl("f=csv", lines)) && startsWith(lines, "https")) {
+        print("--------------------------------------------------------------------4")
+        json_string <- paste(lines, collapse = "\n")
+        inputParameters[[key]] <- json_string
+      } else if (inputParameters$select_process == "plot-image" ||
+                 inputParameters$select_process == "reproject-image") {
+        print("--------------------------------------------------------------------5")
+        tmp <- list()
+        tmp$href <- lines
+        tmp$type <- "image/tiff; application=geotiff"
+        if (inputParameters$select_process == "reproject-image") {
+          tmp$type <- "image/tiff; subtype=geotiff"
+        }
+        inputParameters[[key]] <- tmp
+      } else {
+        print("-----------------------------------6")
+        json_string <- paste(lines, collapse = "\n")
+        json_data <- fromJSON(json_string)
+        inputParameters[[key]] <- json_data
+      }
+      convertedKeys <- append(convertedKeys, key)
+    }
+    else if (grepl("_Array_", key)) {
+      keyParts <- strsplit(key, split = "_")[[1]]
+      type <- keyParts[length(keyParts)]
+      values <- inputParameters[[key]]
+      value_list <- strsplit(values, split = ",")
+      convertedValues <- c()
+
+      for (value in value_list) {
+        if (type == "integer") {
+          value <- as.integer(value)
+        } else if (type == "numeric") {
+          value <- as.numeric(value)
+        } else if (type == "character") {
+          value <- as.character(value)
+        }
+        convertedValues <- append(convertedValues, value)
+
+        convertedKey <- ""
+        for (part in keyParts) {
+          if (part == "Array") {
+            break
+          }
+          convertedKey <- paste(convertedKey, paste(part, "_", sep = ""), sep = "")
+        }
+        convertedKey <- substr(convertedKey, 1, nchar(convertedKey) - 1)
+      }
+
+      inputParameters[[key]] <- convertedValues
+      print("-------------------------")
+      print(convertedValues)
+      print("-------------------------")
+      convertedKeys <- append(convertedKeys, convertedKey)
+    } else {
+      print("-------------------------")
+      print(key)
+      print(inputParameters[[key]])
+      if (!is.null(inputParameters[[key]])) {
+        convertedKeys <- append(convertedKeys, key)
+      }
+      print("-------------------------")
+
+    }
+}
+print(inputParameters)
+names(inputParameters) <- convertedKeys
+#print(inputParameters)
+print("--> Inputs parsed")
+
+print("--> Prepare process execution")
+jsonData <- list("inputs" = inputParameters,
+                 "outputs" = outputs)
+
+print("--> Execute process")
+jobId <- executeProcess(server, inputParameters$select_process, jsonData)
+print("--> Process executed")
+
+print("--> Retrieve results")
+retrieveResults(server, inputParameters$select_process, jobId, outputLocation)
+print("--> Results retrieved")
\ No newline at end of file
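The R wrapper above follows the standard OGC API Processes asynchronous pattern: POST the inputs to `/processes/{id}/execution` with `Prefer: respond-async`, poll `/jobs/{jobId}` until the job settles, then fetch `/jobs/{jobId}/results`. Below is a minimal sketch of that round trip with httr2, assuming a pygeoapi-style server; the process id and input URLs are illustrative, not fixed by the tool.

```r
library(httr2)

server  <- "https://aqua.igb-berlin.de/pygeoapi-dev/"  # server named in the tool help
process <- "points-att-polygon"                        # one of the selectable processes

# The wrapper builds "outputs" via getOutputs() with transmissionMode
# "reference"; it is omitted here to fall back to server defaults.
body <- list(inputs = list(
  regions      = "https://example.org/regions.zip",    # hypothetical input URLs
  input_data   = "https://example.org/points.csv",
  colname_long = "longitude",
  colname_lat  = "latitude"
))

resp <- request(paste0(server, "processes/", process, "/execution")) |>
  req_headers(Prefer = "respond-async") |>
  req_body_json(body) |>   # also sets Content-Type: application/json
  req_perform()

job_id <- resp_body_json(resp)$jobID

# Poll the job endpoint until it leaves the transient states.
repeat {
  status <- resp_body_json(req_perform(request(paste0(server, "jobs/", job_id))))$status
  if (!status %in% c("accepted", "running")) break
  Sys.sleep(2)
}

# With transmissionMode "reference" the results document carries hrefs,
# which is why the script walks it with findHref().
results <- resp_body_json(req_perform(request(paste0(server, "jobs/", job_id, "/results?f=json"))))
```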
diff -r 000000000000 -r 0077885b6f1d aquainfra_ogc_api_processes.xml
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/aquainfra_ogc_api_processes.xml	Mon Oct 14 12:23:01 2024 +0000
@@ -0,0 +1,100 @@
+<tool id="aquainfra_ogc_api_processes" name="AquaINFRA OGC API Processes" version="0.1.0" profile="22.05">
+    <description/>
+    <macros>
+        <import>macros.xml</import>
+    </macros>
+    <creator>
+        <organization name="EOSC AquaINFRA" url="https://aquainfra.eu/"/>
+    </creator>
+    <expand macro="requirements"/>
+    <command detect_errors="exit_code"><![CDATA[
+        Rscript '$__tool_directory__/aquainfra_ogc_api_processes.R'
+            --outputData '$output_data'
+    ]]></command>
+    <configfiles>
+        <inputs name="inputs" filename="inputs.json" data_style="paths"/>
+    </configfiles>
+    <inputs>
+        <conditional name="conditional_process">
+            <param name="select_process" type="select" label="Select process">
+                <option value="points-att-polygon">points-att-polygon: Group points by region</option>
+                <option value="peri-conv">peri-conv: Group data into groups based on date</option>
+                <option value="mean-by-group">mean-by-group: Return group average</option>
+                <option value="ts-selection-interpolation">ts-selection-interpolation: Select and Interpolate Time Series</option>
+                <option value="trend-analysis-mk">trend-analysis-mk: Mann-Kendall Trend Analysis on Time Series</option>
+                <option value="barplot-trend-results">barplot-trend-results: Visualisation of statistical analysis results</option>
+                <option value="map-trends-static">map-trends-static: Spatial visualisation of regions and data points</option>
+                <option value="map-shapefile-points">map-shapefile-points: Spatial visualisation of regions and data points</option>
+            </param>
+            <when value="points-att-polygon">
+                <param name="regions" label="Study region or study subregions" optional="false" help="URL (stored in a .txt file) to the study region, or several regions, to classify your input data into groups of interest. Currently it has to be provided as a shapefile. It can be in any coordinate system and will be transformed to WGS84 during this process." type="data" format="txt"/>
+                <param name="input_data" label="Table to be merged with study region" optional="false" help="URL (stored in a .txt file) to the input table containing the in-situ data points with coordinates. Can be provided as an Excel file or a CSV file (comma-separated text file). The coordinates have to be in the WGS84 coordinate system." type="data" format="txt"/>
+                <param name="colname_long" label="Column name for longitude" optional="false" help="Name of the column that contains longitude values (in WGS84)." type="text"/>
+                <param name="colname_lat" label="Column name for latitude" optional="false" help="Name of the column that contains latitude values (in WGS84)." type="text"/>
+            </when>
+            <when value="peri-conv">
+                <param name="input_data" label="Table to be grouped by date, with date column" help="URL (stored in a .txt file) to the table with a column containing a date. It can have other columns, which will not be changed during this process." type="data" format="txt"/>
+                <param name="colname_date" label="Date column name" optional="false" help="Column name of the date column in the format defined above. Example: 'visit date'." type="text"/>
+                <param name="date_format" label="Date format" optional="true" help="The date format used to parse the date, i.e. to extract day, month and year from the date column, e.g. 'y-m-d' for dates like '1998-08-22' (this is the default) or 'y/m/d' for dates like '1998/08/22'." type="select">
+                    <option value="y-m-d">y-m-d</option>
+                    <option value="y/m/d">y/m/d</option>
+                </param>
+                <param name="group_to_periods" label="Periods to group the data into" optional="true" help="Define the periods that you want the data to be g

[... diff truncated by the changeset viewer; it resumes in a later <when> block below ...]

+                <param name="colname_group" label="Column name for subgroups" optional="false" help="The name of the column that defines the subgroups or categories to be displayed on the X-axis, e.g., seasons for every polygon_id." type="text"/>
+            </when>
+            <when value="map-trends-static">
+                <param name="regions" label="Study region or study subregions" optional="false" help="URL (stored in a .txt file) to the study region, or several regions, used to classify the input data into groups of interest. Currently it has to be provided as a shapefile. It can be in any coordinate system and will be transformed to WGS84 during this process." type="data" format="txt"/>
+                <param name="input_data" label="Input table" optional="false" help="URL to the input table containing statistical analysis results. The table must include columns for test values, p-values, and identifiers linking to the study region." type="data" format="txt"/>
+                <param name="colname_id_trend" label="Column name of study region identifier" optional="false" help="The name of the column containing identifiers for study regions, which must correspond to the identifiers in the shapefile (shp). Example = 'id'." type="text"/>
+                <param name="colname_region_id" label="Column name of study region identifier" optional="false" help="The name of the column in the input data that contains identifiers for study regions, corresponding to the identifiers in the shapefile. Example = 'id'." type="text"/>
+                <param name="colname_group" label="Column name for subgroups" optional="false" help="The name of the column that defines the subgroups or categories to be displayed on the X-axis, e.g., seasons for every polygon_id." type="text"/>
+                <param name="p_value_threshold" label="p-value threshold for significance" optional="false" help="The threshold for distinguishing significant from insignificant values. It adjusts the transparency of bars in the plot. Example = 0.05." value="0.05" min="0.01" max="0.1" type="float"/>
+                <param name="colname_p_value" label="Column name for p-value" optional="false" help="The name of the column containing p-values, used to determine bar transparency. Example = 'p_value'." type="text"/>
+            </when>
+            <when value="map-shapefile-points">
+                <param name="regions" label="Study region or study subregions" optional="false" help="URL (stored in a .txt file) to the study region, or several regions, to classify the input data into groups of interest. Currently it has to be provided as a shapefile. It can be in any coordinate system and will be transformed to WGS84 during this process." type="data" format="txt"/>
+                <param name="input_data" label="Data table to be plotted on top of study regions" optional="false" help="URL (stored in a .txt file) to the input table containing the in-situ data points with coordinates." type="data" format="txt"/>
+                <param name="colname_long" label="Column name for longitude" optional="false" help="The name of the column containing longitude values for the data points." type="text"/>
+                <param name="colname_lat" label="Column name for latitude" optional="false" help="The name of the column containing latitude values for the data points." type="text"/>
+                <param name="colname_value_name" label="Column name of data point identifier" optional="false" help="The name of the column containing identifiers (e.g., site name) or values (e.g., depth) to color the points according to their corresponding values." type="text"/>
+                <param name="colname_region_id" label="Column name of region identifier" optional="false" help="The name of the column containing identifiers (e.g., basin name) to distinguish the polygons on the map if multiple regions are present." type="text"/>
+            </when>
+        </conditional>
+    </inputs>
+    <outputs>
+        <data name="output_data" format="txt" label="$select_process"/>
+    </outputs>
+    <expand macro="tests"/>
+    <expand macro="help"/>
+    <expand macro="citations"/>
+</tool>
diff -r 000000000000 -r 0077885b6f1d macros.xml
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/macros.xml	Mon Oct 14 12:23:01 2024 +0000
@@ -0,0 +1,51 @@
+<macros>
+    <xml name="requirements">
+        <requirements>
+            <requirement type="package" version="4.3.1">r-base</requirement>
+            <requirement type="package" version="1.20.4">r-getopt</requirement>
+            <requirement type="package" version="0.2.3">r-httr2</requirement>
+            <requirement type="package" version="1.8.7">r-jsonlite</requirement>
+        </requirements>
+    </xml>
+    <xml name="citations">
+        <citations>
+            <citation type="bibtex">@Manual{httr2, title = {httr2: Perform HTTP Requests and Process the Responses}, author = {Hadley Wickham}, year = {2023}, note = {R package version 1.0.0, https://github.com/r-lib/httr2}, url = {https://httr2.r-lib.org},}</citation>
+            <citation type="doi">10.48550/arXiv.1403.2805</citation>
+        </citations>
+    </xml>
+    <xml name="help">
+        <help>
+            Use the dropdown menu at the top to select one of the OGC API processes hosted on https://aqua.igb-berlin.de/pygeoapi-dev/processes, then complete the corresponding form to run the service.
+        </help>
+    </xml>
+    <xml name="tests">
+        <tests>
+            <test>
+                <param name="select_process" value="points-att-polygon"/>
+                <param name="regions" value="points_att_polygon_test_input_1.txt"/>
+                <param name="input_data" value="points_att_polygon_test_input_2.txt"/>
+                <param name="colname_long" value="longitude"/>
+                <param name="colname_lat" value="latitude"/>
+                <output name="output_data">
+                    <assert_contents>
+                        <has_n_lines n="1"/>
+                    </assert_contents>
+                </output>
+            </test>
+            <test>
+                <param name="select_process" value="map-shapefile-points"/>
+                <param name="regions" value="points_att_polygon_test_input_1.txt"/>
+                <param name="input_data" value="points_att_polygon_test_input_3.txt"/>
+                <param name="colname_long" value="longitude"/>
+                <param name="colname_lat" value="latitude"/>
+                <param name="colname_value_name" value="transparency_m"/>
+                <param name="colname_region_id" value="HELCOM_ID"/>
+                <output name="output_data">
+                    <assert_contents>
+                        <has_n_lines n="1"/>
+                    </assert_contents>
+                </output>
+            </test>
+        </tests>
+    </xml>
+</macros>
\ No newline at end of file
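Both tests (run via `planemo test`, matching the planemo upload in the commit message) assert only that the tool output has a single line: since the wrapper requests `transmissionMode` "reference", a successful run is expected to write a result URL rather than the data itself. A minimal R equivalent of that assertion, with a hypothetical local file name:

```r
out <- readLines("output_data.txt")  # hypothetical copy of the Galaxy output
stopifnot(length(out) == 1)          # mirrors <has_n_lines n="1"/>
```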
diff -r 000000000000 -r 0077885b6f1d test-data/points_att_polygon_test_input_1.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/points_att_polygon_test_input_1.txt	Mon Oct 14 12:23:01 2024 +0000
@@ -0,0 +1,1 @@
+https://maps.helcom.fi/arcgis/rest/directories/arcgisoutput/MADS/tools_GPServer/_ags_HELCOM_subbasin_with_coastal_WFD_waterbodies_or_wa.zip
\ No newline at end of file
diff -r 000000000000 -r 0077885b6f1d test-data/points_att_polygon_test_input_2.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/points_att_polygon_test_input_2.txt	Mon Oct 14 12:23:01 2024 +0000
@@ -0,0 +1,1 @@
+https://aqua.igb-berlin.de/download/testinputs/in_situ_example.xlsx
\ No newline at end of file
diff -r 000000000000 -r 0077885b6f1d test-data/points_att_polygon_test_input_3.txt
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/points_att_polygon_test_input_3.txt	Mon Oct 14 12:23:01 2024 +0000
@@ -0,0 +1,1 @@
+https://aqua.igb-berlin.de/download/data_merged_with_regions-06550086-857a-11ef-8e41-e14810fdd7f8.csv
\ No newline at end of file
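All three test fixtures are single-line text files holding a URL, which is exactly the shape the R wrapper's input-conversion branch expects: a lone `https` line ending in `.zip`, `.xlsx`, or `.csv` (or containing `f=csv`) is passed through as a plain string, while a `.jp2` link is wrapped into an href/type object. A sketch of that dispatch under the hypothetical name `classify_input`:

```r
library(jsonlite)

classify_input <- function(path) {
  lines <- readLines(path, warn = FALSE)  # fixtures have no trailing newline
  if (length(lines) == 1 && startsWith(lines, "https")) {
    if (endsWith(lines, ".jp2")) {
      return(list(href = lines, type = "image/jp2"))  # imagery goes by reference
    }
    if (endsWith(lines, ".zip") || endsWith(lines, ".xlsx") ||
        endsWith(lines, ".csv") || grepl("f=csv", lines)) {
      return(lines)  # archive/tabular URLs are passed as plain strings
    }
  }
  # Anything else is treated as inline JSON by the wrapper.
  fromJSON(paste(lines, collapse = "\n"))
}

classify_input("test-data/points_att_polygon_test_input_2.txt")
#> [1] "https://aqua.igb-berlin.de/download/testinputs/in_situ_example.xlsx"
```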