Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion R/atlas_media.R
Original file line number Diff line number Diff line change
Expand Up @@ -107,12 +107,15 @@ build_media_id <- function(df, media_fields){
}else{
purrr::map(media_fields, .f = \(a){
df |>
tidyr::unnest_longer(col = a) |>
tidyr::unnest_longer(col = tidyselect::any_of(a)) |>
dplyr::mutate(media_id = as.character(.data[[a]]),
media_type = as.character(a),
.before = 1) |>
dplyr::filter(!is.na(.data$media_id)) |>
dplyr::select(- tidyselect::any_of(media_fields))
# break pipe at this point because tidyselect can't handle -any_of()
# keep_cols <- colnames(result)[!(colnames(result) %in% media_fields)]
# dplyr::select(result, tidyselect::any_of(keep_cols))
}) |>
dplyr::bind_rows()
}
Expand Down
2 changes: 1 addition & 1 deletion R/capture_species.R
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ capture_species_atlas <- function(.query){
if(!is.null(.query$group_by)){
.query$distinct$name <- .query$group_by$name
}else{
cli::cli_error("No variable supplied to `distinct()`")
cli::cli_abort("No variable supplied to `distinct()`")
}
}
}
Expand Down
7 changes: 3 additions & 4 deletions R/check.R
Original file line number Diff line number Diff line change
Expand Up @@ -277,8 +277,8 @@ check_fields_gbif_counts <- function(.query){
# First get filters
# set fields that can be queried using predicates or downloaded
valid_download_fields <- .query[["metadata/fields"]] |>
filter(download_field == TRUE) |>
dplyr::pull(id)
dplyr::filter(.data$download_field == TRUE) |>
dplyr::pull("id")
valid_assertions <- .query[["metadata/assertions"]]$id
valid_any <- c(valid_download_fields, valid_assertions)

Expand All @@ -302,13 +302,12 @@ check_fields_gbif_counts <- function(.query){
facets <- .query$body$group_by$name
# check for invalid facets
valid_search_fields <- .query[["metadata/fields"]] |>
filter(search_field == TRUE) |>
dplyr::filter(.data$search_field == TRUE) |>
dplyr::pull(id)
if (!all(facets %in% valid_search_fields)) {
invalid_facets <- facets[!(facets %in% valid_search_fields)]
group_by_invalid <- glue::glue_collapse(invalid_facets, sep = ", ")
}
# }
}

c(filter_invalid, group_by_invalid)
Expand Down
8 changes: 4 additions & 4 deletions R/collect_distributions.R
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ collect_distributions <- function(.query){
result <- query_API(.query)
# NOTE: below is consistent with `collect_distributions_metadata()` (+ geometry)
result <- result |>
bind_rows() |>
select("gid",
dplyr::bind_rows() |>
dplyr::select("gid",
"family",
"genus_name",
"scientific",
Expand All @@ -16,14 +16,14 @@ collect_distributions <- function(.query){
"area_km",
"data_resource_uid",
"geometry") |>
rename(
dplyr::rename(
"id" = "gid", # this is chosen as ID because it is called by later APIs
"genus" = "genus_name",
"species" = "scientific",
"taxon_concept_id" = "lsid",
"label" = "area_name",
"common_name" = "common_nam") |>
mutate("common_name" = trimws(.data$common_name))
dplyr::mutate("common_name" = trimws(.data$common_name))
result$geometry <- sf::st_as_sfc(result$geometry, crs=4326)
return(sf::st_as_sf(result))
}
6 changes: 3 additions & 3 deletions R/collect_metadata.R
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ tidy_list_columns <- function(x){
list_names <- names(x)[list_check]
list_tibbles <- purrr::map(list_names,
\(a){
tibble::tibble({{a}} := list(x[[a]]))
tibble::tibble({{a}} == list(x[[a]]))
})
}else{
list_tibbles <- NULL
Expand All @@ -51,7 +51,7 @@ tidy_list_columns <- function(x){
make_nulls_safe() |>
tibble::as_tibble() |>
dplyr::bind_cols(list_tibbles) |>
dplyr::select(!!!names(x)) # reorder columns to same as `x`
dplyr::select(tidyselect::any_of(names(x))) # reorder columns to same as `x`
}

#' Internal function to ensure rows can be converted to tibble
Expand Down Expand Up @@ -264,7 +264,7 @@ collect_fields <- function(.query){

if(is_gbif()){
# we need to join some local metadata to GBIF info
df <- galah:::gbif_internal_archived$search_fields |>
df <- gbif_internal_archived$search_fields |>
dplyr::mutate(search_field = TRUE)

# organise information from the API
Expand Down
7 changes: 3 additions & 4 deletions R/collect_metadata_unnest.R
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ collect_fields_unnest <- function(.query,
purrr::pluck(!!!list("facets", 1, "counts")) |>
dplyr::bind_rows() |>
dplyr::rename_with(camel_to_snake_case) |>
dplyr::rename({{facet}} := "name") |>
dplyr::rename({{facet}} == "name") |>
parse_select(.query)

}else{
Expand All @@ -33,10 +33,10 @@ collect_fields_unnest <- function(.query,
if(nrow(result) > 0){
result |>
dplyr::mutate(
field_name := stringr::str_extract(result$i18nCode, "(?<=\\.).*"),
field_name == stringr::str_extract(result$i18nCode, "(?<=\\.).*"),
.before = 1) |>
dplyr::rename_with(camel_to_snake_case) |>
dplyr::rename({{facet}} := "field_name") |>
dplyr::rename({{facet}} == "field_name") |>
parse_select(.query)
}else{ # i.e. catch empty results
result
Expand Down Expand Up @@ -72,7 +72,6 @@ collect_lists_unnest <- function(.query){

# extract additional raw fields columns
clean_kvp_values <- function(df){
browser()
if(any(colnames(df) == "kvpValues")){
if(any(lengths(df$kvpValues) > 0)){
df <- df |>
Expand Down
2 changes: 1 addition & 1 deletion R/collect_taxa.R
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ collect_identifiers <- function(.query){
# we avoid `is_gbif()` here because other atlases use GBIF APIs
result$success <- TRUE
result <- result |>
dplyr::relocate(success, .before = 1) |>
dplyr::relocate("success", .before = 1) |>
parse_rename(.query)
}

Expand Down
4 changes: 2 additions & 2 deletions R/compound.R
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ compound.prequery <- function(x, mint_doi = FALSE, ...){
if(stringr::str_detect(x$type, "^metadata")){
build_query_set_metadata(x)
}else if(stringr::str_detect(x$type, "^files")){
list(as_query_media_files(x, ...)) |>
list(capture_media_files(x, ...)) |>
as_query_set()
}else if(x$type == "data/distribtions"){
build_query_set_distributions(x)
Expand Down Expand Up @@ -245,7 +245,7 @@ build_query_set_distributions <- function(x, ...){
}else{
if(!is.null(x$identify)){
result <- list(
collapse_taxa(list(identify = x$identify)) # wrong syntax?
capture_taxa(list(identify = x$identify)) # wrong syntax?
)
result[[2]] <- capture_distributions_data(x) # NOTE: shouldn't call microfunctions directly
}else{
Expand Down
5 changes: 3 additions & 2 deletions R/dplyr-glimpse.R
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
#' frame. It's a little like [str()] applied to a data frame but it tries to
#' show you as much data as possible. This implementation is specific
#' to `galah` and is evaluated lazily. `r lifecycle::badge("experimental")`
#'
#' @param x An object of class `data_request`
#' @param ... Other arguments, currently ignored
#' @details
#' This implementation of [glimpse()] actually involves changing the API call
#' sent to the server, then returning a novel object class with its own
Expand All @@ -19,7 +20,7 @@ glimpse.data_request <- function(x, ...){
#' @rdname glimpse.data_request
#' @export
print.occurrences_glimpse <- function(x, ...){
y <- capture.output(dplyr::glimpse(x))
y <- utils::capture.output(dplyr::glimpse(x))
n_text <- attr(x, 'total_n') |>
formatC(big.mark = ",")
y[[1]] <- glue::glue("Rows: {n_text}")
Expand Down
5 changes: 3 additions & 2 deletions R/filter_object_classes.R
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
#'
#' In galah, there are several ways to provide filter information. To ensure
#' these are handled and printed correctly, they are assigned classes
#' @param x a list
#' @rdname filter_object_classes
#' @param x a list, or object of supported class
#' @name filter_object_classes
#' @order 1
#' @export
as_data_filter <- function(x){
Expand Down Expand Up @@ -44,6 +44,7 @@ as_files_filter <- function(x){
# Print functions for the above

#' @rdname filter_object_classes
#' @param ... Additional arguments, currently ignored
#' @order 5
#' @export
print.data_filter <- function(x, ...){
Expand Down
4 changes: 2 additions & 2 deletions R/galah_bbox.R
Original file line number Diff line number Diff line change
Expand Up @@ -87,10 +87,10 @@ galah_bbox <- function(...) {
} else {
if (inherits(query, c("sf", "sfc"))) {
query <- query |>
sf::st_bbox(crs = st_crs("WGS84"))
sf::st_bbox(crs = sf::st_crs("WGS84"))
bbox_coords <- round(query, 5)
query <- query |>
sf::st_as_sfc(crs = st_crs("WGS84")) # FIXME: should we define the projection?
sf::st_as_sfc(crs = sf::st_crs("WGS84")) # FIXME: should we define the projection?
}
}
}
Expand Down
4 changes: 2 additions & 2 deletions R/galah_polygon.R
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,8 @@ parse_polygon <- function(query,
} else {

# remove space after "POLYGON" if present
if(str_detect(query, "POLYGON \\(\\("))
query <- string::str_replace(query, "POLYGON \\(\\(", "POLYGON\\(\\(")
if(stringr::str_detect(query, "POLYGON \\(\\("))
query <- stringr::str_replace(query, "POLYGON \\(\\(", "POLYGON\\(\\(")

if (stringr::str_detect(query, "POLYGON") &
!stringr::str_detect(query, "MULTIPOLYGON")) {
Expand Down
2 changes: 1 addition & 1 deletion R/handle_quosures.R
Original file line number Diff line number Diff line change
Expand Up @@ -468,7 +468,7 @@ parse_in <- function(x, excl){
#' @noRd
#' @keywords internal
parse_c <- function(x, excl){
if(length(quo_get_expr(x)) < 2L){
if(length(rlang::quo_get_expr(x)) < 2L){
filter_error()
}
# convert to logical format using OR statements
Expand Down
4 changes: 2 additions & 2 deletions R/print.R
Original file line number Diff line number Diff line change
Expand Up @@ -182,10 +182,10 @@ print.prequery <- print.query
print.computed_query <- function(x, ...){
# calculate arrange/slice info
if(!is.null(x$arrange)){
arrange <- galah_pale_green(glue("\n
arrange <- galah_pale_green(glue::glue("\n
arrange: {x$arrange$variable} ({x$arrange$direction})"))
if(x$arrange$slice_called == TRUE){
slice <- galah_pale_green(glue("\n
slice <- galah_pale_green(glue::glue("\n
slice: {x$arrange$slice_n}"))
}else{
slice <- NULL
Expand Down
2 changes: 2 additions & 0 deletions R/search_all.R
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
#' @param all_fields `r lifecycle::badge("experimental")` If `TRUE`,
#' `show_values()` also returns all columns available from the API, rather
#' than the 'default' columns traditionally provided via galah.
#' @param query One or more objects accepted by the taxonomic lookup services. See
#' [taxonomic_searches] for details
#' @details There are six categories of information, each with their own
#' specific sub-functions to look-up each type of information.
#' The available types of information for `search_all()` are:
Expand Down
2 changes: 1 addition & 1 deletion R/show_values.R
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ show_values <- function(df,
if(isTRUE(all_fields)){
request_metadata() |>
filter({{type}} == {{match_name}}) |>
select(everything()) |>
select(tidyselect::everything()) |>
unnest() |>
collect()
}else{
Expand Down
4 changes: 2 additions & 2 deletions R/utilities_internal.R
Original file line number Diff line number Diff line change
Expand Up @@ -483,8 +483,8 @@ reasons_supported <- function(){
atlas <- potions::pour("atlas", "region")
supported_atlases <- request_metadata(type = "apis") |>
collect() |>
dplyr::filter(type == "metadata/reasons") |>
dplyr::pull(atlas)
dplyr::filter(.data$type == "metadata/reasons") |>
dplyr::pull("atlas")
atlas %in% supported_atlases
}

Expand Down
57 changes: 0 additions & 57 deletions SPECIES_EXAMPLE.R

This file was deleted.

7 changes: 5 additions & 2 deletions man/filter_object_classes.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 5 additions & 0 deletions man/glimpse.data_request.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions man/search_all.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions tests/testthat/test-atlas_species.R
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,8 @@ test_that("atlas_species reformats column names when empty tibble is returned",
})

test_that("`atlas_species()` accepts `distinct()` to set the grouping variable", {
skip_if_offline(); skip_on_ci()

genera <- galah_call() |>
identify("Limnodynastidae") |>
distinct(genusID) |>
Expand Down
Loading
Loading