diff --git a/IsoriX/.Rbuildignore b/IsoriX/.Rbuildignore index 159afbf..7ca5224 100644 --- a/IsoriX/.Rbuildignore +++ b/IsoriX/.Rbuildignore @@ -3,3 +3,6 @@ R/testpackage\.R man/isosim\.Rd README\.md +^.*\.Rproj$ +^\.Rproj\.user$ +^cran-comments\.md$ diff --git a/IsoriX/DESCRIPTION b/IsoriX/DESCRIPTION index 9d0c7d6..333eb68 100644 --- a/IsoriX/DESCRIPTION +++ b/IsoriX/DESCRIPTION @@ -1,5 +1,5 @@ Package: IsoriX -Version: 0.9.1.9999 +Version: 0.9.2 Encoding: UTF-8 Title: Isoscape Computation and Inference of Spatial Origins using Mixed Models Authors@R: c( @@ -41,10 +41,11 @@ Suggests: rgl, spelling, testthat, - webshot2 + webshot2, + withr LazyData: true -URL: https://github.com/courtiol/IsoriX/ -BugReports: https://github.com/courtiol/IsoriX/issues/ +URL: https://github.com/courtiol/IsoriX, https://bookdown.org/content/782 +BugReports: https://github.com/courtiol/IsoriX/issues RoxygenNote: 7.2.3 Roxygen: list(markdown = TRUE) Language: en-GB diff --git a/IsoriX_project.Rproj b/IsoriX/IsoriX.Rproj similarity index 70% rename from IsoriX_project.Rproj rename to IsoriX/IsoriX.Rproj index 1cc8893..9f96499 100644 --- a/IsoriX_project.Rproj +++ b/IsoriX/IsoriX.Rproj @@ -1,8 +1,8 @@ Version: 1.0 -RestoreWorkspace: No -SaveWorkspace: No -AlwaysSaveHistory: No +RestoreWorkspace: Default +SaveWorkspace: Default +AlwaysSaveHistory: Default EnableCodeIndexing: Yes UseSpacesForTab: Yes @@ -14,7 +14,5 @@ LaTeX: pdfLaTeX BuildType: Package PackageUseDevtools: Yes -PackagePath: IsoriX PackageInstallArgs: --no-multiarch --with-keep.source -PackageCheckArgs: --as-cran PackageRoxygenize: rd,collate,namespace diff --git a/IsoriX/NAMESPACE b/IsoriX/NAMESPACE index 65f5214..2257c70 100644 --- a/IsoriX/NAMESPACE +++ b/IsoriX/NAMESPACE @@ -10,12 +10,17 @@ S3method(print,CALIBFIT) S3method(print,ISOFIND) S3method(print,ISOFIT) S3method(print,ISOSCAPE) +S3method(readRDS,character) +S3method(saveRDS,CALIBFIT) +S3method(saveRDS,ISOFIND) +S3method(saveRDS,ISOSCAPE) S3method(summary,CALIBFIT) 
S3method(summary,ISOFIND) S3method(summary,ISOFIT) S3method(summary,ISOSCAPE) export("crs<-") export("ext<-") +export() export(RdBuTheme) export(calibfit) export(cellSize) @@ -45,9 +50,15 @@ export(prepcipitate) export(prepraster) export(prepsources) export(rast) +export(saveRDS_IsoriX) export(shift) export(values) export(xyplot) +exportClasses(CALIBFIT) +exportClasses(ISOFIND) +exportClasses(ISOSCAPE) +exportMethods(readRDS) +exportMethods(saveRDS) importFrom(grid,gpar) importFrom(grid,grid.text) importFrom(lattice,panel.points) @@ -65,5 +76,7 @@ importFrom(terra,plot) importFrom(terra,points) importFrom(terra,polys) importFrom(terra,rast) +importFrom(terra,readRDS) +importFrom(terra,saveRDS) importFrom(terra,shift) importFrom(terra,values) diff --git a/IsoriX/NEWS.md b/IsoriX/NEWS.md new file mode 100644 index 0000000..5cb978b --- /dev/null +++ b/IsoriX/NEWS.md @@ -0,0 +1,316 @@ +# IsoriX 0.9.2 + +* **main release goal** + + * This release restores some key features that had been lost after dropping the direct dependencies **raster** and **sp** in v0.9.1.: + * the possibility to save and reload objects created by IsoriX (now via `saveRDS()` and `readRDS()`). (#172) + * the possibility to plot oceans and other masks containing "holes" (thanks to changes in **lattice** and **rasterVis**). (#169, #170) + +* **breaking changes** + + As compared to IsoriX versions < 0.9.1, the following changes may break existing code: + + * code for plots: + + ```r + layer(sp.polygons(CountryBorders, col = "white")) + + layer(sp.polygons(OceanMask, col = "white", fill = "lightgreen")) + ``` + + now needs to be replaced by: + + ```r + layer(lpolygon(CountryBorders, border = "white")) + + layer(lpolygon(OceanMask, border = "white", col = "lightgrey")) + ``` + + Notice both the change in the function used to plot polygons and the change in the arguments used to control the colour of the borders and the colour of the fill. 
+ Similarly, the function `sp.points()` should be replaced by `lpoints()` and so on. + + * saving and reloading objects: `save()` & `load()` can no longer be used; one must instead use `saveRDS()` & `readRDS()`. + +* **new features** + + * the NEWS (contained in this file) are now stored in `NEWS.md` rather than in `inst/NEWS.Rd` and use a markdown syntax. + * it is now possible to have missing values in predictors used to build isoscapes and in the isoscapes themselves. + +* **major changes** + + * plotting methods for polygons, lines and points have been removed from IsoriX and are now handled by **lattice** and **rasterVis**. + * new S3 and S4 methods `saveRDS()` for objects of the class `ISOSCAPE`, `CALIBFIT` & `ISOFIND` (see `?serialize` for details). + * new S3 and S4 method `readRDS()` which should be able to read objects created in IsoriX, as well as objects created with **terra** and objects created otherwise. + + In case of issues reading RDS files not created by IsoriX, try using `base::readRDS()` with the namespace `base::` mentioned explicitly and please let us know of this issue. + +* **minor changes** + + * the bookdown is now listed in DESCRIPTION. + * some old URLs have been updated. + +* **bug fixes** + + * the function `isomultiscape()` was still using **raster** instead of **terra**. + * the object `PrecipBrickDE` was still using **raster** instead of **terra**. + +* **internal (geeky) changes** + + * new function `.safe_and_quiet_predictions()` which wraps around `spaMM::predict.HLfit()`, turns warnings into messages, allows not to display the same messages many times, and outputs `NA`s when `spaMM::predict.HLfit()` fails. For testing, `options_IsoriX(spaMM_debug = TRUE)` may be used to restore the original behaviour of `spaMM::predict.HLfit()`. + * the classes `ISOSCAPE`, `CALIBFIT` & `ISOFIND` are now also defined as S4 classes, which was necessary to design methods for `saveRDS()` which are compatible with **terra**. 
+ * the package now contains a `WORDLIST` file which is used by `devtools::spell_check()` (via `spelling::spell_check_package`) to check for typos in the documentation. + * fixed various `|` or `&` which should have always been `||` or `&&` (spotted via `lintr::lint_package()`). + * fixed various sequences of the form `1:...` which should have always been handled by `seq_along` or `seq_len` to avoid NULL issues (spotted via `lintr::lint_package()`). + * package **withr** now suggested; we use it to automatically delete a file created during testing (using `withr::defer`). + * instead of one Rproj file used to handle both the package and the bookdown development, we now rely on 2 Rproj files, which solves some limitations encountered with **usethis**. + + +# v0.9.1 + +* **main release goal** + + * Several spatial packages previously used by IsoriX are likely to retire sometime in October 2023. + The maintainers of those packages have recommended developers to instead rely on alternative packages which have been recently developed and which supersede the old packages. As a consequence, we had to recode a lot of IsoriX for it to continue to work. For the most part, these changes are internal and should not impact users much, but it is possible that old workflows used to create plots will have to be adapted for the code to keep working. Moreover, IsoriX is not the only package that had to be overhauled, other packages used by IsoriX are also being adapted, which means that the programming landscape is dynamic and bugs caused by incompatibility between packages are likely to surface. We will do our best to react quickly, but please let us know as soon as something goes wrong by dropping issues on the GitHub repository for IsoriX (https://github.com/courtiol/IsoriX/issues). 
All these changes can be perceived as annoying, but it is also for the best: it will allow us to add new features more easily in IsoriX in the future and it also makes it easier for users to convert IsoriX outputs so as to manipulate them using packages such as **sf** and **ggplot2**. + +* **major changes** + + * IsoriX no longer relies on the package **raster**. It instead now uses **terra** for handling rasters. (#90 #161) + * IsoriX no longer relies on the package **sp**. Plotting functionalities from sp have now been replaced by direct calls to **lattice**. For now, we had to implement methods and generics calling **lattice** in IsoriX, but those should ultimately be handled within **rasterVis** and **lattice**. + +* **minor changes** + + * `getprecip()` now normalizes the input file and returns the path where the precipitation rasters are stored. + * `prepcipitate()` can now handle as input for `path =` either the full path to the files returned by `getprecip()` -- which contains the folder provided to `path` when calling `getprecip()` in addition to `"/wc2.1_30s_prec"` -- or the reduced path which only contains the folder provided to `path` when calling `getprecip()`. + * `getprecip()` now changes the timeout R options temporarily so as to avoid the download failing because the default timeout setting is too short. (#148) + * the documentation for the datasets `GNIPDataALLagg` and `GNIPDataEUagg` was incorrect. (#158) + * one message about possible extrapolation during calibration was erroneous and is now removed. (#159) + +* **internal (geeky) changes** + + * `OceanMask` and `CountryBorders` are no longer stored as RDA files in `/data`, but as RDS files in `/extdata` since objects created with **terra** cannot be saved as RDA files. These files are automatically loaded when the package is attached. + * **elevatr** moved from Imports to Suggests. 
(#157) + + +# v0.9.0 + +* **bug fixes** + + * the previous release introduced an error in how the variance of the assignment test is computed in the absence of calibration (with important consequences in terms of assignments). This is now fixed. (#151) + +* **minor changes** + + * the base package **colourspace** is now suggested to avoid a note in R CMD check. + + +# v0.8.3 + +* **new features** + + * the function `calibfit()` gains an argument `method` that allows for selecting one of four calibration methods ("wild", "lab", "desk", "desk_inverse"). This allows for users to use: + 1) calibration samples associated with unknown environmental isotopic values, + 2) calibration samples associated with known environmental isotopic values, or + 3) & 4) the intercept and slope of a calibration relationship computed by others (e.g. values found in a paper). + + Note: the "desk" methods allow for the consideration of a fractionation factor too (i.e. slope = 0). See `?calibfit` for details. (#20 & #142) + + * the function `getelev()` has been completely rewritten so as to rely on the package **elevatr** to download elevation data. You should check `?getelev` for learning how to use the new version of the function, but we retained the core principle of the previous function so that old workflows will only require minor adjustments. The new version still saves a `*.tif` file on the disk, albeit using a different file name to avoid (data) confusion. (#140 & #107) + * the function `isofind()` gains an argument `neglect_covPredCalib` that allows for the computation of a covariance term that was so far neglected in IsoriX. See `?isofind` for details. (#143) + * the function `prepraster()` gains an argument `values_to_zero` to turn a range of elevation values to zeros (nullify negative elevation values by default). This is particularly useful because the new version of `get_elev()` downloads an elevation raster that includes bathymetry. 
+ * new internal function `.invert_reg()` to invert regression (used for method "desk_inverse" in `calibfit()`. + +* **minor changes** + + * when calling `plot()` on an object created with `calibfit()`, the plotting function now returns the fitted values and CI for users to be able to make alternative plots. (#44) + * new argument `xlim` for the plotting function for calibration fits. + * new argument `line` for customizing how to plot the regression line in calibration fits. + * the summary method for calibration fits now displays the residual variance. + * `calibfit()` performs more check on extrapolation. (#119) + * when using `plot()` on an object of class ISOFIT, the x-axis for the plot showing the Matérn correlation should have a range more adequate irrespective when autocorrelation is strong over short distances. (#134) + * documentation for `?plot()` now contains a description of what symbols mean in plots. (#138) + * when calling `plot()` on an object created with `isofind()`, the plotting function now detects sample of size 1 and no longer displays "Group" in the title of the assignment plot even if `who` = "group". (#120) + * all functions accepting a `data.frame` as input should also now be compatible when provided with a `tibble`. (#118) + * typos have been corrected. (#130) + * default y-axis title changed to "Isotopic value in the environment" when plotting calibration fits to be flexible enough irrespective of the methods used in `calibfit()` + +* **internal (geeky) changes** + + * the argument `long_min`, `long_max`, `lat_min` & `lat_max` function `prepsources()` now have explicit default values and should no longer be missing. + * the version of **spaMM** required by IsoriX has changed to 3.13 so as to benefit from a new extractor we rely on for the computation of the 4th variance term during assignment. 
(#143) + * the functions depending on the package **RandomFields** are no longer available since that package has been (for now) retired by CRAN :-( + * IsoriX should now work with tibbles as inputs. (#118) + +* **bug fixes** + + * the printing method for the object of class ISOSCAPE was somehow not exported and thus not used (unreported issue). + * plotting ISOFIND objects on a sphere did not work in some cases. (#126) + + +# v0.8.2 + +* **new features** + + * new argument `ylim` for the plotting function for calibration fits. + * it is now possible to calibrate data containing missing isotopic values. + * it is now possible to assign data containing missing isotopic values. + +* **internal (geeky) changes** + + * the SpatialPolygons `CountryBorders` and `OceanMask` have been rebuilt for possibly improving the compatibility with new **sp** & **rgdal**. + * the website for `WorldClim` has now changed address, so links have been updated. + * **rgdal** is now listed as a suggested package. + +* **minor changes** + + * several URLs had changed and have been updated. + * all old defunct functions have been removed from the package. + + +# v0.8.1 + +* **bug fixes** + + * the plotting function was not working for isoscapes not stored in memory due to a wrong use of the quantile function. Many thanks to Dr. Gary Roemer and Amy Withers for reporting it! (#113) + +* **new features** + + * the datasets used in Courtiol et al. 2019 are now provided. + * many useful functions from **raster**, **rasterVis**, **lattice**... are now re-exported so they can be used without attaching those packages. + * new option in plots that allows one to map the isoscape onto a sphere. + * a new dataset `PrecipBrickDE` containing monthly precipitation amounts for Germany. + * an argument `y_title` for the plotting function for isoscapes to allow one to simply change the title. + * arguments `xlab` and `ylab` for the plotting function for calibration fits. 
+ * new method points for plotting more than one calibration fit. + * the plotting function for assignments can now show the location of the assignment samples. + +* **major changes** + + * the citations for the package have been updated! + * many objects have been renamed to prepare the release of the version 1.0. + * the vignettes have now been moved to a bookdown. To access the documentation you should now visit: https://bookdown.org/content/782/ + +* **minor changes** + + * all arguments with the structure `bla.bla` have been renamed so as to match the structure `bla_bla`. + * the plotting function for `calibfit()` gains an argument `...` for more control. + * a plotting method for `rasterLayer` has been included for convenience. + * the function `relevate()` is now called `prepraster()`. + * the function `prepdata()` is now called `prepsources()`. + * in several functions the argument `elevation.raster` has been renamed as `raster`. + * in several functions the argument `xxx.data` has been renamed as `data`. + +* **internal (geeky) changes** + + * the file storing the internal functions is now called `zzz.R`. + * the `dontrun` and `donttest` calls have been replaced by comments due to new R CMD check flags. + * the function `downloadfile()` is now exported. + * large temporary objects are now deleted within isofind to limit memory usage. + * the package is now being tested using **testthat**, but tests will be implemented in the future. + * a lot of the internal code as been rewritten to comply more closely to the IsoriX coding style. + * the list of suggested packages has been revised and **rgdal** removed as it caused (again) problems with Travis CI. + * following a change in **spaMM** `predict.HLfit()`, the prediction are now being made by chunk of 1000 points instead of 150. This should lead to a tiny gain in performance. 
+ * the function `isoscape()` was performing predictions twice every 150 (or now 1000) locations, this was not influencing the isoscapes produced, but this has now been corrected. + * the function `prepraster()` now produces an raster stored in memory if it is possible. This should prevent bugs that appears when using loaded rasters that were previously saved (the temporary link to the hard drive location is no longer correct in this case). + * the function `.objective_fn_calib()` has been moved within the function `calibfit()` as it is not used elsewhere. + * the function `calibfit()` as been prepared for a possible activation of a random effect for species ID in the future. But whether it would make sense or not remains to be determined. + * the function `.Fisher_method()` now directly computes the exponential of the log pv if only one value is provided. This leads to much faster assignment in the case of a single observation. + +* **bug fixes** + + * the plotting function for calibration fit was displaying CI based on variance instead of SD. + * the function `getprecip()` and `prepcipitate()` were not handling paths manually defined properly. + * the plotting functions were crashing in case of no variation in the landscape. + * the plotting functions were crashing when called on multiple-raster objects not stored 'inMemory'. + * the plotting function for fitted model was not displaying one plot in RStudio when called on objects of class `MULTIISOFIT`. + + +# v0.7.1 + +* **new features** + + * this is a minor update necessary to maintain compatibility with **spaMM** 2.4. + + +* **internal (geeky) changes** + + * the syntax for the extraction of correlation terms of **spaMM** objects has changed. + + +# v0.7 + +* **new features** + + * the calibration step is now optional, allowing for users to use an isoscape directly fitted on tissues instead of precipitation water. 
+ * the function `queryGNIP()` has been renamed and is now called `prepdata()`, this function can also handle other datasets than GNIP. + * the function `relevate()` has been modified to make crop possible around the pacific meridian -180/180 (but several issues remain to handle extra plot layers automatically). + + +* **internal (geeky) changes** + + * an additional options as been added to prevent prompting during examples. + * new internal function `.converts_months_to_numbers()`. + + +# v0.6 + +* **new features** + + * the maximum duration of running time for examples can now be controlled using `IsoriX.options(example_maxtime = XX)`. + * due to new GNIP policies, we no longer provide the GNIP dataset for the entire World, but only a subset containing data for Germany (users should thus compile their precipitation data themselves from the 'wiser' platform provided by GNIP; see vignette Workflow). + * it is now possible to control the colours and labels for the levels of isotopes or p-values in plots. + * for plotting, it is no longer needed to load the ocean mask and country borders (it now happens automatically). + * the function `relevate()` now allows for a cropping larger than the extent of the weather stations by means of the argument `margin_pct`. + * it is now possible to create the so-called annual averaged precipitation isoscapes! + * queryGNIP can now split the dataset per month or year at each location during the aggregation. + * new function `prepcipitate()` to prepare the precipitation brick. + * new function `getprecip()` to download monthly precipitation rasters from WorldClim. + * new function `isomultifit()` fitting isoscapes per strata (month, year, or any "split"). + * new function `isomultiscape()` building isoscapes averaged across strata. + * new function `create_aliens()` simulating of organism data. + +* **minor changes** + + * the inputs for filtering data by month or year using `queryGNIP()` have changed. 
+ * the default fixed effect structure for the mean model is `isofit()` has changed. + +* **internal (geeky) changes** + + * the namespace is now generated with Roxygen2. + * the datasets are now 'lazy-loaded'. + * new vignette for coding conventions. + * changed some object names following our coding convention (more to come). + + +# v0.5 + +* **bug fixes** + + * the package could not be detached and reloaded. + * the citation was not correct. + * the path in `getelev()` was breaking in some cases. + * the title of the assignment plot was missing when a single individual was plotted. + +* **new features** + + * new vignette explaining how to export spatial objects to GIS. + * the file `GNIPdata` has been updated and now contains data for 2014. + * names of all functions and objects have been refactored to remove upper cases. + * links to our GitHub directory have been added. + * new function `downloadfile()` to download non standard elevation raster or any other file. + * function `getelev()` can perform MD5 sum checks if the package **tools** is installed. + * function `getelev()` can display additional information during download if `verbose` > 1. + * the column `animalID` in the assignment dataset can now handle names with spaces. + * added **Codecov** to track test coverage for the package. + +* **minor changes** + + * the modification of the option `set_ll_warn` from the **sp** package has been moved to `.onLoad()` (instead of `.onAttach()`) and the original state is now restored while unloading IsoriX. + * the Earth distance method has been moved to the package **spaMM**. + * function `getelev()` lost its `address` argument as `downloadfile()` should now be used to download non-standard elevation rasters. + * some typo fixed in documentation files. + * **RandomFields** moved to suggest. + * `*.Rd` files for documentation are now generated with Roxygen2. + * `queryGNIP()` is now provided with a single month argument specifying the months to select. 
+ + +# v0.4-1 + +* this was the first version of IsoriX submitted to CRAN. + diff --git a/IsoriX/R/IsoriX-datasets.R b/IsoriX/R/IsoriX-datasets.R index 4d69cc9..156ee4c 100644 --- a/IsoriX/R/IsoriX-datasets.R +++ b/IsoriX/R/IsoriX-datasets.R @@ -1,10 +1,10 @@ #' Assignment dataset for bat species -#' +#' #' This dataset contains data from Voigt & Lenhert (2019). It contains hydrogen #' delta values of fur keratin from common noctule bats (\emph{Nyctalus noctula}) #' killed at wind turbines in northern Germany. The data can be used as an #' example to perform assignments using the function [isofind]. -#' +#' #' @name AssignDataBat #' @docType data #' @noMd @@ -15,29 +15,29 @@ #' [, 2] \tab lomg \tab (*numeric*) \tab Longitude coordinate (decimal degrees)\cr #' [, 4] \tab sample_value \tab (*numeric*) \tab Hydrogen delta value of the tissue\cr } #' @seealso [isofind] to perform assignments -#' @references Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial +#' @references Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial #' mammals using stable isotopes. In Hobson KA, Wassenaar LI (eds.), Tracking Animal #' Migration with Stable Isotopes, second edition. Academic Press, London. -#' +#' #' @source data directly provided by the authors of the following publication #' @keywords datasets #' @examples -#' +#' #' head(AssignDataBat) #' str(AssignDataBat) -#' +#' NULL #' Assignment dataset for bat species -#' +#' #' This dataset contains data from Voigt, Lehmann and Greif (2015). It contains #' hydrogen delta values of fur keratin from bats captured in 2008, 2009 and #' 2013 from their roosting sites in Bulgaria. We only retained the bats of the #' genus Myotis from the original study. The data can be used as an example to #' perform assignments using the function [isofind]. 
-#' +#' #' @name AssignDataBat2 #' @docType data #' @noMd @@ -53,22 +53,22 @@ NULL #' @source data directly provided by the authors of the following publication #' @keywords datasets #' @examples -#' +#' #' head(AssignDataBat2) #' str(AssignDataBat2) -#' +#' NULL #' Calibration dataset for bat species -#' +#' #' This dataset contains hydrogen delta values of fur keratin from 6 sedentary #' bat species. It corresponds to the combination of several studies as detailed #' in Voigt & Lenhert 2019. This is the dataset used in Courtiol et al. 2019. #' The data can be used as an example to fit a calibration model using the #' function [calibfit]. -#' +#' #' Users who wish to use their own dataset for calibration should create a #' *dataframe* of similar structure than this one (only the column 'species' #' can be dropped). The columns should possess the same names as the ones @@ -76,7 +76,7 @@ NULL #' information can be extracted from a high resolution elevation raster using #' the function [terra::extract] (see **Examples** in #' [CalibDataBat2]). -#' +#' #' @name CalibDataBat #' @docType data #' @noMd @@ -90,19 +90,19 @@ NULL #' [, 6] \tab species \tab (*factor*) \tab A code for the species\cr #' [, 7] \tab sample_value \tab (*numeric*) \tab Hydrogen delta value of the tissue\cr } #' @seealso [CalibDataBat2] for another (related) calibration dataset -#' +#' #' [calibfit] to fit a calibration model -#' @references Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial +#' @references Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial #' mammals using stable isotopes. In Hobson KA, Wassenaar LI (eds.), Tracking Animal #' Migration with Stable Isotopes, second edition. Academic Press, London. -#' +#' #' Courtiol A, Rousset F, Rohwäder M, Soto DX, Lehnert L, Voigt CC, Hobson KA, Wassenaar LI, Kramer-Schadt S (2019). Isoscape #' computation and inference of spatial origins with mixed models using the R package IsoriX. 
In Hobson KA, Wassenaar LI (eds.), #' Tracking Animal Migration with Stable Isotopes, second edition. Academic Press, London. -#' +#' #' @keywords datasets #' @examples -#' +#' #' head(CalibDataBat) #' str(CalibDataBat) NULL @@ -110,12 +110,12 @@ NULL #' Calibration dataset for bat species -#' +#' #' This dataset contains hydrogen delta values of fur keratin from sedentary #' bat species captured between 2005 and 2009 from Popa-Lisseanu et al. (2012). #' The data can be used as an example to fit a calibration model using the #' function [calibfit]. -#' +#' #' Users who wish to use their own dataset for calibration should create a #' *dataframe* of similar structure than this one (only the column #' 'species' can be dropped). The columns should possess the same names as the @@ -123,7 +123,7 @@ NULL #' elevation information can be extracted from a high resolution elevation #' raster using the function [terra::extract] (see **Examples**). #' Note that the original study used a different source of elevation data. -#' +#' #' @name CalibDataBat2 #' @docType data #' @noMd @@ -136,7 +136,7 @@ NULL #' [, 5] \tab sample_ID \tab (*factor*) \tab Identification of the sampled animal\cr #' [, 6] \tab sample_value \tab (*numeric*) \tab Hydrogen delta value of the tissue\cr } #' @seealso [CalibDataBat] for another (related) calibration dataset -#' +#' #' [calibfit] to fit a calibration model #' @references Popa-Lisseanu, A. G., Soergel, K., Luckner, A., Wassenaar, L. 
#' I., Ibanez, C., Kramer-Schadt, S., Ciechanowski, M., Goerfoel, T., Niermann, @@ -146,15 +146,15 @@ NULL #' @source data directly provided by the authors of the following publication #' @keywords datasets #' @examples -#' +#' #' head(CalibDataBat2) #' str(CalibDataBat2) -#' +#' #' ## The following example require to have downloaded #' ## an elevation raster with the function getelev() #' ## and will therefore not run unless you uncomment it -#' -#' #if (require(terra)){ +#' +#' # if (require(terra)){ #' # ## We delete the elevation data #' # CalibDataBat2$elev <- NULL #' # @@ -169,17 +169,17 @@ NULL #' # ElevationRasterBig, #' # cbind(CalibDataBat2$long, CalibDataBat2$lat)) #' # head(CalibDataBat2) -#' #} -#' +#' # } +#' NULL #' Simulated assignment dataset -#' -#' This dataset contains simulated hydrogen delta values. +#' +#' This dataset contains simulated hydrogen delta values. #' The data can be used as an example to perform assignments using the function [isofind]. -#' +#' #' @name AssignDataAlien #' @docType data #' @noMd @@ -190,66 +190,68 @@ NULL #' @seealso [isofind] to perform assignments #' @keywords datasets #' @examples -#' +#' #' head(AssignDataAlien) #' str(AssignDataAlien) -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 30) { -#' -#' ## The following describes how we created such dataset -#' -#' ### We prepare the precipitation data -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' ### We fit the models for Germany -#' GermanFit <- isofit(data = GNIPDataDEagg) -#' -#' ### We build the isoscape -#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -#' -#' ### We create a simulated dataset with 1 site and 10 observations -#' set.seed(1L) -#' Aliens <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), -#' isoscape = GermanScape, -#' raster = ElevRasterDE, -#' coordinates = data.frame(site_ID = "Berlin", -#' long = 13.52134, -#' lat = 52.50598), -#' n_sites = 1, -#' min_n_samples = 10, -#' max_n_samples = 10) -#' AssignDataAlien <- Aliens[, c("sample_ID", "sample_value")] -#' -#' ### Uncomment the following to store the file as we did -#' #save(AssignDataAlien, file = "AssignDataAlien.rda", compress = "xz") -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## The following describes how we created such dataset +#' +#' ### We prepare the precipitation data +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' ### We fit the models for Germany +#' GermanFit <- isofit(data = GNIPDataDEagg) +#' +#' ### We build the isoscape +#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) +#' +#' ### We create a simulated dataset with 1 site and 10 observations +#' set.seed(1L) +#' Aliens <- create_aliens( +#' calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), +#' isoscape = GermanScape, +#' raster = ElevRasterDE, +#' coordinates = data.frame( +#' site_ID = "Berlin", +#' long = 13.52134, +#' lat = 52.50598 +#' ), +#' n_sites = 1, +#' min_n_samples = 10, +#' max_n_samples = 10 +#' ) +#' AssignDataAlien <- Aliens[, c("sample_ID", "sample_value")] +#' +#' ### Uncomment the following to store the file as we did +#' # save(AssignDataAlien, file = 
"AssignDataAlien.rda", compress = "xz") #' } -#' +#' NULL #' Simulated calibration dataset -#' +#' #' This dataset contains simulated hydrogen delta values for corresponding locations -#' based on an assumed linear relationship between the animal tissue value and the +#' based on an assumed linear relationship between the animal tissue value and the #' hydrogen delta values in the environment. #' The data can be used as an example to fit a calibration model using the #' function [calibfit]. -#' +#' #' Users who wish to use their own dataset for calibration should create a -#' *dataframe* of similar structure than this one. The columns should possess -#' the same names as the ones described above. If the elevation is unknown at the +#' *dataframe* of similar structure than this one. The columns should possess +#' the same names as the ones described above. If the elevation is unknown at the #' sampling sites, elevation information can be extracted from a high resolution elevation #' raster using the function [terra::extract]. In this dataset, we #' retrieved elevations from the Global Multi-resolution Terrain Elevation Data #' 2010. -#' +#' #' @name CalibDataAlien #' @docType data #' @noMd @@ -264,59 +266,58 @@ NULL #' @seealso [calibfit] to fit a calibration model #' @keywords datasets #' @examples -#' +#' #' head(CalibDataAlien) #' str(CalibDataAlien) -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 30) { -#' -#' ## We prepare the precipitation data -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' ## We fit the models for Germany -#' GermanFit <- isofit(data = GNIPDataDEagg) -#' -#' ## We build the isoscape -#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -#' -#' ## We create a simulated dataset with 50 site and 10 observations per site -#' set.seed(2L) -#' CalibDataAlien <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), -#' isoscape = GermanScape, -#' raster = ElevRasterDE, -#' n_sites = 50, -#' min_n_samples = 10, -#' max_n_samples = 10) -#' plot(sample_value ~ source_value, data = CalibDataAlien) -#' abline(3, 0.5) -#' -#' CalibDataAlien$source_value <- NULL -#' -#' ## Uncomment the following to store the file as we did -#' #save(CalibDataAlien, file = "CalibDataAlien.rda", compress = "xz") -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## We prepare the precipitation data +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' ## We fit the models for Germany +#' GermanFit <- isofit(data = GNIPDataDEagg) +#' +#' ## We build the isoscape +#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) +#' +#' ## We create a simulated dataset with 50 site and 10 observations per site +#' set.seed(2L) +#' CalibDataAlien <- create_aliens( +#' calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), +#' isoscape = GermanScape, +#' raster = ElevRasterDE, +#' n_sites = 50, +#' min_n_samples = 10, +#' max_n_samples = 10 +#' ) +#' plot(sample_value ~ source_value, data = CalibDataAlien) +#' abline(3, 0.5) +#' +#' CalibDataAlien$source_value <- NULL +#' +#' ## Uncomment the following to store the file as we did +#' # save(CalibDataAlien, file = "CalibDataAlien.rda", compress = "xz") #' } -#' -#' +#' NULL #' Borders of world CountryBorders -#' +#' #' This dataset contains a polygon polygon SpatVector (from \pkg{terra}). 
#' It can be used to draw the borders of world countries. -#' -#' +#' +#' #' @name CountryBorders #' @docType data #' @format A *SpatVector* object -#' @seealso +#' @seealso #' - [OceanMask] for another polygon used to embellish the plots #' @source This *SpatVector* is derived from the package #' \pkg{rnaturalearth}. Please refer to this other package for description and @@ -324,41 +325,41 @@ NULL #' dataset. #' @keywords datasets #' @examples -#' -#' plot(CountryBorders, border="red", col="darkgrey") -#' +#' +#' plot(CountryBorders, border = "red", col = "darkgrey") +#' #' ## How did we create this file? -#' +#' #' ## Uncomment the following to create the file as we did #' # if (require(rnaturalearth) && require(terra)) { #' # CountryBorders <- rnaturalearth::ne_countries(scale = 'medium', returnclass = 'sf') #' # CountryBorders <- vect(CountryBorders[, 0]) #' # #saveRDS(CountryBorders, file = "IsoriX/inst/extdata/CountryBorders.rds", compress = "xz") #' # } -#' +#' NULL #' Mask of world oceans -#' +#' #' This dataset contains a polygon SpatVector (from \pkg{terra}). #' It can be used to mask large bodies of water. -#' -#' +#' +#' #' @name OceanMask #' @docType data #' @format A *SpatVector* object -#' @seealso +#' @seealso #' - [CountryBorders] for another polygon used to embellish the plots #' @source See example for details on how we created the dataset. #' @keywords datasets #' @examples -#' -#' plot(OceanMask, col='blue') -#' +#' +#' plot(OceanMask, col = "blue") +#' #' ## How did we create this file? 
-#' +#' #' ## Uncomment the following to create the file as we did #' # if (require(terra)) { #' # worldlimit <- vect(ext(CountryBorders)) @@ -366,22 +367,21 @@ NULL #' # OceanMask <- worldlimit - CountryBorders #' # #saveRDS(OceanMask, file = "IsoriX/inst/extdata/OceanMask.rds", compress = "xz") #' # } -#' -#' +#' NULL #' The raster of elevation for Germany -#' +#' #' This raster contains the elevation of the surface of Germany (meters above sea #' level) with a resolution of approximately 40 square-km. -#' +#' #' This raster contains elevation data of Germany in a highly aggregated form #' corresponding to a resolution of approximately one elevation value per 40 #' square-km. This is only for the purpose of having a small and easy-to-handle #' file to practice, but it should not be used to perform real assignments! -#' +#' #' @name ElevRasterDE #' @docType data #' @format A *SpatRaster* object @@ -389,33 +389,32 @@ NULL #' @source \url{https://topotools.cr.usgs.gov/gmted_viewer/viewer.htm} #' @keywords datasets #' @examples -#' +#' #' ## Compute crudely the resolution (approximative size of cells in km2) #' median(values(cellSize(ElevRasterDE, unit = "km"))) -#' +#' #' ## How did we create this file (without IsoriX) ? -#' +#' #' ## Uncomment the following to create the file as we did -#' +#' #' # ElevRasterDE <- elevatr::get_elev_raster(locations = data.frame( #' # x = c(5.5, 15.5), y = c(47, 55.5)), #' # prj = "+proj=longlat +datum=WGS84 +no_defs", #' # clip = "bbox", z = 3) -#' # +#' # #' # ElevRasterDE <- terra::rast(ElevRasterDE) -#' -#' +#' +#' #' ## How to create a similar file with IsoriX ? 
#' # #' # ## Download the tif file (see ?getelev) #' # getelev(file = "~/ElevRasterDE.tif", #' # z = 3, #' # long_min = 5.5, long_max = 15.5, lat_min = 47, lat_max = 55.5) -#' +#' #' # ## Convert the tif into R raster format #' # ElevRasterDE <- rast('~/ElevRasterDE.tif') -#' -#' +#' NULL @@ -427,7 +426,7 @@ NULL #' #' The data are derived from "precipitation (mm) WorldClim Version2" which can #' be downloaded using the function [getprecip]. -#' +#' #' @name PrecipBrickDE #' @docType data #' @format A *RasterBrick* @@ -435,24 +434,24 @@ NULL #' @source \url{https://www.worldclim.org/data/worldclim21.html} #' @keywords datasets #' @examples -#' +#' #' ## The following example requires to download #' ## a large precipitation rasters with the function getprecip() #' ## and will therefore not run unless you uncomment it -#' +#' #' ## How did we create this file? -#' +#' #' ## Uncomment the following to create the file as we did -#' #getprecip() ## Download the tif files (~ 1 Gb compressed) -#' #PrecipBrickDE <- prepcipitate(raster = ElevRasterDE) -#' #save(PrecipBrickDE, file = "PrecipBrickDE", compress = "xz") -#' +#' # getprecip() ## Download the tif files (~ 1 Gb compressed) +#' # PrecipBrickDE <- prepcipitate(raster = ElevRasterDE) +#' # terra::saveRDS(PrecipBrickDE, file = "PrecipBrickDE.rds", compress = "xz") +#' NULL #' Hydrogen delta values in precipitation water, Germany -#' +#' #' This dataset contains the hydrogen delta value from #' precipitation water sampled at weather stations between 1961 and 2013 in #' Germany. These data have been kindly provided by Christine Stumpp and @@ -464,19 +463,19 @@ NULL #' we do provide aggregated versions of it; see [GNIPDataEUagg]). #' You can still download the complete GNIP dataset for free, but you will have #' to proceed to a registration process with GNIP and use their downloading -#' interface WISER (\url{http://www-naweb.iaea.org/napc/ih/IHS_resources_isohis.html}). 
-#' +#' interface WISER (\url{https://nucleus.iaea.org/wiser/index.aspx}). +#' #' The dataset contains non-aggregated data for 27 weather stations across Germany. -#' +#' #' This dataset is the raw data source and should not be directly used for #' fitting isoscapes. -#' +#' #' Please use [prepsources] to filter the dataset by time and #' location. -#' +#' #' If you want to use your own dataset, you must format your data as those #' produced by the function [prepsources]. -#' +#' #' @name GNIPDataDE #' @docType data #' @noMd @@ -492,17 +491,17 @@ NULL #' @seealso [prepsources] to prepare the dataset for the analyses and #' to filter by time and location. #' @references GNIP Project IAEA Global Network of Isotopes in Precipitation: \url{https://www.iaea.org} -#' +#' #' Stumpp, C., Klaus, J., & Stichler, W. (2014). Analysis of long-term stable isotopic composition in German precipitation. Journal of hydrology, 517, 351-361. -#' +#' #' Klaus, J., Chun, K. P., & Stumpp, C. (2015). Temporal trends in d18O composition of precipitation in Germany: insights from time series modelling and trend analysis. Hydrological Processes, 29(12), 2668-2680. -#' +#' #' @source Data provided by the IAEA. #' @keywords datasets #' @examples -#' +#' #' head(GNIPDataDE) -#' +#' NULL @@ -520,14 +519,14 @@ NULL #' still download the complete GNIP dataset for free, but you will have to #' proceed to a registration process with GNIP and use their downloading #' interface WISER -#' (\url{http://www-naweb.iaea.org/napc/ih/IHS_resources_isohis.html}). +#' (\url{https://nucleus.iaea.org/wiser/index.aspx}). #' #' These datasets have been aggregated and can thus be directly used for fitting #' isoscapes. #' #' If you want to use your own dataset, you must format your data as these #' datasets. -#' +#' #' @name GNIPDataEUagg #' @aliases GNIPDataEUagg GNIPDataALLagg #' @docType data @@ -546,23 +545,23 @@ NULL #' @source Data provided by the IAEA and processed by us. 
#' @keywords datasets #' @examples -#' +#' #' head(GNIPDataALLagg) #' dim(GNIPDataALLagg) #' head(GNIPDataEUagg) #' dim(GNIPDataEUagg) -#' +#' NULL #' Colour palettes for plotting -#' +#' #' These datasets contain colour vectors that can be used for plotting. In our #' examples, we use the `isopalette1` for plotting the isoscape using #' [plot.ISOSCAPE] and `isopalette2` for plotting the #' assignment outcome using [plot.ISOFIND]. -#' +#' #' Colour palettes can be created by using the function [colorRamp] #' that interpolates colours between a set of given colours. One can also use #' [colorRampPalette] to create functions providing colours. Also @@ -573,7 +572,7 @@ NULL #' already available such as [terrain.colors] or others available #' (see examples below). Alternatively, you can design your own colour palette #' by writing standard hexadecimal code of colours into a vector. -#' +#' #' @name isopalette2 #' @aliases isopalette2 isopalette1 #' @docType data @@ -581,44 +580,62 @@ NULL #' @note We use the package \pkg{rasterVis} for plotting. Instead of using #' colour palettes directly, one can also use any "Theme" designed for the #' lattice graphic environment (see source for details). 
-#' @seealso [grDevices::rainbow] for information about R colour palettes,#' +#' @seealso [grDevices::rainbow] for information about R colour palettes, #' [grDevices::colorRamp] and [colorspace::choose_palette] to create your #' own palettes #' @source For information on how to use themes, check: -#' +#' #' \url{https://oscarperpinan.github.io/rastervis/#themes} #' @keywords color datasets #' @examples -#' +#' #' ## A comparison of some colour palette -#' +#' #' par(mfrow = c(2, 3)) -#' pie(rep(1, length(isopalette1)), col = isopalette1, -#' border = NA, labels = NA, clockwise = TRUE, main = "isopalette1") -#' pie(rep(1, length(isopalette2)), col = isopalette2, -#' border = NA, labels = NA, clockwise = TRUE, main = "isopalette2") -#' pie(rep(1, 100), col = terrain.colors(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "terrain.colors") -#' pie(rep(1, 100), col = rainbow(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "rainbow") -#' pie(rep(1, 100), col = topo.colors(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "topo.colors") -#' pie(rep(1, 100), col = heat.colors(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "heat.colors") -#' +#' pie(rep(1, length(isopalette1)), +#' col = isopalette1, +#' border = NA, labels = NA, clockwise = TRUE, main = "isopalette1" +#' ) +#' pie(rep(1, length(isopalette2)), +#' col = isopalette2, +#' border = NA, labels = NA, clockwise = TRUE, main = "isopalette2" +#' ) +#' pie(rep(1, 100), +#' col = terrain.colors(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "terrain.colors" +#' ) +#' pie(rep(1, 100), +#' col = rainbow(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "rainbow" +#' ) +#' pie(rep(1, 100), +#' col = topo.colors(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "topo.colors" +#' ) +#' pie(rep(1, 100), +#' col = heat.colors(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "heat.colors" +#' ) +#' #' ## Creating your own 
colour palette #' MyPalette <- colorRampPalette(c("blue", "green", "red"), bias = 0.7) #' par(mfrow = c(1, 1)) -#' pie(1:100, col = MyPalette(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "a home-made palette") -#' +#' pie(1:100, +#' col = MyPalette(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "a home-made palette" +#' ) +#' #' ## Turing palettes into functions for use in IsoriX #' Isopalette1Fn <- colorRampPalette(isopalette1, bias = 0.5) #' Isopalette2Fn <- colorRampPalette(isopalette2, bias = 0.5) #' par(mfrow = c(1, 2)) -#' pie(1:100, col = Isopalette1Fn(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "isopalette1") -#' pie(1:100, col = Isopalette2Fn(100), border = NA, labels = NA, -#' clockwise = TRUE, main = "isopalette2") +#' pie(1:100, +#' col = Isopalette1Fn(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "isopalette1" +#' ) +#' pie(1:100, +#' col = Isopalette2Fn(100), border = NA, labels = NA, +#' clockwise = TRUE, main = "isopalette2" +#' ) #' NULL diff --git a/IsoriX/R/IsoriX-package.R b/IsoriX/R/IsoriX-package.R index 7dea5ca..0cfcaf4 100644 --- a/IsoriX/R/IsoriX-package.R +++ b/IsoriX/R/IsoriX-package.R @@ -103,7 +103,7 @@ #' likely origin with the prediction region around it. When several organisms #' are being assigned, both assignments at the level of each sample and a single #' assignment for the whole group can be performed. } -#' +#' #' @name IsoriX-package #' @aliases IsoriX-package IsoriX #' @docType package @@ -111,21 +111,21 @@ #' longitude) of any spatial data (locations, rasters) must be given in decimal #' degrees following the WGS84 spheroid standard. #' @author Alexandre Courtiol \email{alexandre.courtiol@@gmail.com}, -#' +#' #' François Rousset, -#' +#' #' Marie-Sophie Rohwaeder, -#' +#' #' Stephanie Kramer-Schadt \email{kramer@@izw-berlin.de} #' @references Courtiol A, Rousset F, Rohwäder M, Soto DX, Lehnert L, Voigt CC, Hobson KA, #' Wassenaar LI, Kramer-Schadt S (2019). 
"Isoscape computation and inference of
#' spatial origins with mixed models using the R package IsoriX." In Hobson KA,
#' Wassenaar LI (eds.), Tracking Animal Migration with Stable Isotopes, second
#' edition. Academic Press, London.
-#'
+#'
#' Courtiol A, Rousset F (2017). "Modelling isoscapes using mixed models." bioRxiv.
#' doi: 10.1101/207662, [link](https://www.biorxiv.org/content/10.1101/207662v1).
-#'
+#'
#' @keywords package
-#'
+#'
NULL
diff --git a/IsoriX/R/S4classes.R b/IsoriX/R/S4classes.R
new file mode 100644
index 0000000..32b1618
--- /dev/null
+++ b/IsoriX/R/S4classes.R
@@ -0,0 +1,60 @@
+# These S4 definitions of classes are required for saveRDS to work and interact properly with the package terra.
+
+#' Class ISOSCAPE
+#'
+#' @slot isoscapes a SpatRaster storing the isoscapes
+#' @slot sp_points a list of spatial points
+#'
+#' @export
+#' @rdname ISOSCAPE-class
+#'
+setClass("ISOSCAPE", slots = c(
+  isoscapes = "SpatRaster",
+  sp_points = "list"
+))
+
+
+#' Class CALIBFIT
+#'
+#' @slot method a character string indicating the method used for the calibration
+#' @slot species_rand a logical indicating whether the species random effect is included in the model
+#' @slot site_rand a logical indicating whether the site random effect is included in the model
+#' @slot param the fixed-effect estimates of the calibration function
+#' @slot fixefCov the covariance matrix of the fixed effects
+#' @slot phi the residual variance of the calibration fit
+#' @slot calib_fit the fitted calibration model (if applicable)
+#' @slot iso_fit the fitted isoscape model (if applicable)
+#' @slot data the calibration data
+#' @slot sp_points a list of spatial points used for calibration
+#'
+#' @export
+#' @rdname CALIBFIT-class
+#'
+setClass("CALIBFIT", slots = c(
+  method = "character",
+  species_rand = "logical",
+  site_rand = "logical",
+  param = "numeric",
+  fixefCov = "matrix",
+  phi = "numeric",
+  calib_fit = "list",
+  iso_fit = "list",
+  data = "data.frame",
+  
sp_points = "list" +)) + + +#' Class ISOFIND +#' +#' @slot sample a list of SpatRaster objects storing the assignment info for each sample +#' @slot group a SpatRaster storing the group assignment info +#' @slot sp_points a list of SpatVector storing the spatial points for sources, calibration and assignment samples +#' +#' @export +#' @rdname ISOFIND-class +#' +setClass("ISOFIND", slots = c( + sample = "list", + group = "SpatRaster", + sp_points = "list" +)) diff --git a/IsoriX/R/calibfit.R b/IsoriX/R/calibfit.R index 7e6e17e..4346b2d 100644 --- a/IsoriX/R/calibfit.R +++ b/IsoriX/R/calibfit.R @@ -69,9 +69,9 @@ #' a different calibration sample (i.e. a single isotopic measurement). See #' [CalibDataAlien], [CalibDataBat], or #' [CalibDataBat2] for examples of such a dataset. -#' +#' #' ## Method "lab" -#' +#' #' This calibration method is the one to be used when the calibration data to be #' used correspond to isotopic values recorded for both organisms and their #' environment. We can foresee three main situations in which the "lab" method @@ -97,7 +97,7 @@ #' generally preferable to using the method "desk" described below (less error #' prone and de facto accounting for all five parameters mentioned for the #' method "desk"). -#' +#' #' - **Statistical model**: in this case, the calibration model to be fitted is #' a simple linear model (LM) or a simple linear mixed-effects model (LMM) that #' fits the isotopic values of sedentary organisms as a linear function of the @@ -113,7 +113,7 @@ #' `NULL` in this case (since no isoscape is used, no isoscape fit is #' required to perform the calibration). The model used to fit the calibration #' function has a simple fixed effect structure: an intercept and a slope. 
-#' +#' #' - **Required calibration data**: the calibration data to be used here must be #' a dataframe (or a tibble) containing at least the following columns: #' - `sample_value`: the isotopic value of the calibration sample @@ -164,7 +164,7 @@ #' Note that if the provided slope is set to 0 and an intercept is considered, #' the calibration methods actually corresponds to the simple consideration of a #' fractionation factor. -#' +#' #' - **Statistical model**: none! #' #' - **Required calibration data** for method "desk": the calibration data to be @@ -176,7 +176,7 @@ #' - `slope_se` (optional): the standard error around the slope #' - `resid_var` (optional): the residual variance (not SD) of a LM #' calibration fit -#' +#' #' - **Required calibration data** for method "desk_inverse": the calibration #' data to be used here must be a dataframe (or a tibble) containing a single #' row with the following columns: @@ -192,13 +192,13 @@ #' - `N`: a *numeric* indicating the sample size of the data used #' for the calibration fit. This is required for pivoting the regression from #' "desk_inverse" to "desk". -#' +#' #' @aliases calibfit print.CALIBFIT summary.CALIBFIT #' @inheritParams isoscape #' @param data A *dataframe* containing the calibration data (see note #' below) #' @param method A *string* indicating the method used to generate the data -#' used for the calibration. By default method is `"wild"`, but the other +#' used for the calibration. By default method is `"wild"`, but the other #' `"lab"`, `"desk"` and `"desk_inverse"`. #' See **Details** for the difference between these three methods. #' @param verbose A *logical* indicating whether information about the @@ -225,164 +225,178 @@ #' inference of spatial origins with mixed models using the R package IsoriX. #' In Hobson KA, Wassenaar LI (eds.), Tracking Animal Migration with Stable #' Isotopes, second edition. Academic Press, London. 
-#' +#' #' @examples -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) -#' +#' #' if (getOption_IsoriX("example_maxtime") > 30) { -#' -#' ##################################################### -#' ## 1 Example of calibration using the method "wild" # -#' ##################################################### -#' -#' ## 1.1 We prepare the data to fit the isoscape: -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' ## 1.2 We fit the isoscape models for Germany: -#' GermanFit <- isofit(data = GNIPDataDEagg, -#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -#' -#' ## 1.3 We fit the calibration model using the method "wild" (the default): -#' CalibAlien <- calibfit(data = CalibDataAlien, isofit = GermanFit) -#' -#' ## 1.4 We explore the outcome of the calibration: -#' CalibAlien -#' summary(CalibAlien) -#' plot(CalibAlien) -#' -#' ## Note 1: you can plot several calibrations at once (using bats this time): -#' CalibBat1 <- calibfit(data = CalibDataBat, isofit = GermanFit) -#' CalibBat2 <- calibfit(data = CalibDataBat2, isofit = GermanFit) -#' plot(CalibBat1) -#' points(CalibBat2, pch = 3, col = "red", CI = list(col = "green")) -#' -#' ## Note 2: you can extract data created by plot() for plotting things yourself: -#' dataplot <- plot(CalibAlien, plot = FALSE) -#' plot(sample_fitted ~ source_value, data = dataplot, -#' xlim = range(dataplot$source_value), -#' ylim = range(dataplot$sample_lwr, dataplot$sample_upr), col = NULL) -#' polygon(x = c(dataplot$source_value, rev(dataplot$source_value)), -#' y = c(dataplot$sample_lwr, rev(dataplot$sample_upr)), -#' col = 3) -#' points(sample_fitted ~ source_value, data = dataplot, type = "l", lty = 2) -#' -#' -#' #################################################### -#' ## 2 Example of 
calibration using the method "lab" # -#' #################################################### -#' -#' ## 2.0 We create made up data here because we don't have yet a good dataset -#' ## for this case, but you should use your own data instead: -#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -#' set.seed(123) -#' CalibDataAlien2 <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, -#' resid_var = 5), -#' isoscape = GermanScape, -#' raster = ElevRasterDE, -#' n_sites = 25, -#' min_n_samples = 5, -#' max_n_samples = 5) -#' CalibDataAlien2 <- CalibDataAlien2[, c("site_ID", "sample_ID", "source_value", -#' "sample_value")] -#' head(CalibDataAlien2) ## your data should have this structure -#' -#' ## 2.1 We fit the calibration model using the method "lab": -#' CalibAlien2 <- calibfit(data = CalibDataAlien2, method = "lab") -#' -#' ## 2.2 We explore the outcome of the calibration: -#' CalibAlien2 -#' summary(CalibAlien2) -#' plot(CalibAlien2) -#' -#' -#' ##################################################### -#' ## 3 Example of calibration using the method "desk" # -#' ##################################################### -#' -#' ## 3.1 We format the information about the calibration function to be used -#' ## as a dataframe: -#' CalibDataAlien3 <- data.frame(intercept = 1.67, slope = 0.48, -#' intercept_se = 1.65, slope_se = 0.03, -#' resid_var = 3.96) -#' CalibDataAlien3 -#' -#' ## 3.2 We fit the calibration model using the method "desk": -#' CalibAlien3 <- calibfit(data = CalibDataAlien3, method = "desk") -#' -#' ## 3.3 We explore the outcome of the calibration: -#' CalibAlien3 -#' summary(CalibAlien3) -#' plot(CalibAlien3, xlim = c(-100, 100), ylim = c(-50, 50)) -#' -#' ## Note: the desk function also work with just intercept and slope: -#' CalibDataAlien4 <- CalibDataAlien3[, c("intercept", "slope")] -#' CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk") -#' CalibAlien4 -#' summary(CalibAlien4) -#' plot(CalibAlien3, xlim = 
c(-100, 100), ylim = c(-50, 50)) -#' points(CalibAlien4, line = list(col = "orange")) -#' ## Regression lines are the same, but the new calibration does not have a -#' ## confidence intervals since we provided no uncertainty measure in -#' ## CalibDataAlien4, which will make a difference during assignments... -#' -#' -#' ############################################################# -#' ## 4 Example of calibration using the method "desk_inverse" # -#' ############################################################# -#' -#' ## 4.1 We format the information about the calibration function to be used -#' ## as a dataframe: -#' CalibDataAlien4 <- data.frame(intercept = -16.98822, slope = 1.588885, -#' intercept_se = 2.200435, slope_se = 0.08106032, -#' resid_var = 13.15102, N = 125, sign_mean_Y = -1) -#' CalibDataAlien4 -#' -#' ## 4.2 We fit the calibration model using the method "desk_inverse": -#' CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk_inverse") -#' -#' ## 4.3 We explore the outcome of the calibration: -#' CalibAlien4 -#' summary(CalibAlien4) -#' plot(CalibAlien4, xlim = c(-100, 100), ylim = c(-50, 50)) +#' ##################################################### +#' ## 1 Example of calibration using the method "wild" # +#' ##################################################### +#' +#' ## 1.1 We prepare the data to fit the isoscape: +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' ## 1.2 We fit the isoscape models for Germany: +#' GermanFit <- isofit( +#' data = GNIPDataDEagg, +#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE) +#' ) +#' +#' ## 1.3 We fit the calibration model using the method "wild" (the default): +#' CalibAlien <- calibfit(data = CalibDataAlien, isofit = GermanFit) +#' +#' ## 1.4 We explore the outcome of the calibration: +#' CalibAlien +#' summary(CalibAlien) +#' plot(CalibAlien) +#' +#' ## Note 1: you can plot several calibrations at once (using bats this time): +#' CalibBat1 <- calibfit(data = CalibDataBat, isofit = 
GermanFit) +#' CalibBat2 <- calibfit(data = CalibDataBat2, isofit = GermanFit) +#' plot(CalibBat1) +#' points(CalibBat2, pch = 3, col = "red", CI = list(col = "green")) +#' +#' ## Note 2: you can extract data created by plot() +#' ## for plotting things yourself: +#' dataplot <- plot(CalibAlien, plot = FALSE) +#' plot(sample_fitted ~ source_value, +#' data = dataplot, +#' xlim = range(dataplot$source_value), +#' ylim = range(dataplot$sample_lwr, dataplot$sample_upr), col = NULL +#' ) +#' polygon( +#' x = c(dataplot$source_value, rev(dataplot$source_value)), +#' y = c(dataplot$sample_lwr, rev(dataplot$sample_upr)), +#' col = 3 +#' ) +#' points(sample_fitted ~ source_value, data = dataplot, type = "l", lty = 2) +#' +#' +#' #################################################### +#' ## 2 Example of calibration using the method "lab" # +#' #################################################### +#' +#' ## 2.0 We create made up data here because we don't have yet a good dataset +#' ## for this case, but you should use your own data instead: +#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) +#' set.seed(123) +#' CalibDataAlien2 <- create_aliens( +#' calib_fn = list( +#' intercept = 3, slope = 0.5, +#' resid_var = 5 +#' ), +#' isoscape = GermanScape, +#' raster = ElevRasterDE, +#' n_sites = 25, +#' min_n_samples = 5, +#' max_n_samples = 5 +#' ) +#' CalibDataAlien2 <- CalibDataAlien2[, c( +#' "site_ID", "sample_ID", +#' "source_value", "sample_value" +#' )] +#' head(CalibDataAlien2) ## your data should have this structure +#' +#' ## 2.1 We fit the calibration model using the method "lab": +#' CalibAlien2 <- calibfit(data = CalibDataAlien2, method = "lab") +#' +#' ## 2.2 We explore the outcome of the calibration: +#' CalibAlien2 +#' summary(CalibAlien2) +#' plot(CalibAlien2) +#' +#' +#' ##################################################### +#' ## 3 Example of calibration using the method "desk" # +#' ##################################################### +#' 
+#' ## 3.1 We format the information about the calibration function to be used
+#' ## as a dataframe:
+#' CalibDataAlien3 <- data.frame(
+#'   intercept = 1.67, slope = 0.48,
+#'   intercept_se = 1.65, slope_se = 0.03,
+#'   resid_var = 3.96
+#' )
+#' CalibDataAlien3
+#'
+#' ## 3.2 We fit the calibration model using the method "desk":
+#' CalibAlien3 <- calibfit(data = CalibDataAlien3, method = "desk")
+#'
+#' ## 3.3 We explore the outcome of the calibration:
+#' CalibAlien3
+#' summary(CalibAlien3)
+#' plot(CalibAlien3, xlim = c(-100, 100), ylim = c(-50, 50))
+#'
+#' ## Note: the desk function also works with just intercept and slope:
+#' CalibDataAlien4 <- CalibDataAlien3[, c("intercept", "slope")]
+#' CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk")
+#' CalibAlien4
+#' summary(CalibAlien4)
+#' plot(CalibAlien3, xlim = c(-100, 100), ylim = c(-50, 50))
+#' points(CalibAlien4, line = list(col = "orange"))
+#' ## Regression lines are the same, but the new calibration does not have
+#' ## confidence intervals since we provided no uncertainty measure in
+#' ## CalibDataAlien4, which will make a difference during assignments...
+#' +#' +#' ############################################################# +#' ## 4 Example of calibration using the method "desk_inverse" # +#' ############################################################# +#' +#' ## 4.1 We format the information about the calibration function to be used +#' ## as a dataframe: +#' CalibDataAlien4 <- data.frame( +#' intercept = -16.98822, slope = 1.588885, +#' intercept_se = 2.200435, slope_se = 0.08106032, +#' resid_var = 13.15102, N = 125, sign_mean_Y = -1 +#' ) +#' CalibDataAlien4 +#' +#' ## 4.2 We fit the calibration model using the method "desk_inverse": +#' CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk_inverse") +#' +#' ## 4.3 We explore the outcome of the calibration: +#' CalibAlien4 +#' summary(CalibAlien4) +#' plot(CalibAlien4, xlim = c(-100, 100), ylim = c(-50, 50)) #' } -#' +#' #' @export calibfit <- function(data, isofit = NULL, method = c("wild", "lab", "desk", "desk_inverse"), verbose = interactive(), - control_optim = list() -) { - + control_optim = list()) { ## checking inputs if (inherits(isofit, "MULTIISOFIT")) { stop("object 'isofit' of class MULTIISOFIT; calibration have not yet been implemented for this situation.") } - + method <- match.arg(method, c("wild", "lab", "desk", "desk_inverse")) - + ## Note: part of the code is prepared to use species_rand as an argument (with NULL = automatic selection) ## That would allow to fit species as a random effect in the model ## However, it is not obvious that it would make sense to do that as it may ## remove variance that should be captured during the assignment. ## We thus disregard this term for now. 
species_rand <- FALSE - + species_info <- "species_ID" %in% colnames(data) if (!is.null(species_rand)) { if (!species_info && species_rand) { stop("The random effect for species cannot be fit if data does not contain a column called species_ID") } } - + ## prepare the dataset data <- .prepare_data_calib(data, method = method) - + ## set species_rand (not used for now) if (!species_info) { species_rand <- FALSE @@ -390,53 +404,53 @@ calibfit <- function(data, nb_species <- length(unique(data$species_ID)) species_rand <- ifelse(is.null(species_rand) && nb_species > 4, TRUE, FALSE) } - + ## apply the calibration method result_calib <- switch(method, - wild = .calibfit_wild(data = data, - isofit = isofit, - species_rand = species_rand, - verbose = verbose, - control_optim = control_optim), - lab = .calibfit_lab(data = data, - species_rand = species_rand, - verbose = verbose), - desk = .calibfit_desk(data = data, verbose = verbose), - desk_inverse = .calibfit_desk_inverse(data = data, verbose = verbose) + wild = .calibfit_wild( + data = data, + isofit = isofit, + species_rand = species_rand, + verbose = verbose, + control_optim = control_optim + ), + lab = .calibfit_lab( + data = data, + species_rand = species_rand, + verbose = verbose + ), + desk = .calibfit_desk(data = data, verbose = verbose), + desk_inverse = .calibfit_desk_inverse(data = data, verbose = verbose) ) - + class(result_calib) <- c("CALIBFIT", "list") - + return(invisible(result_calib)) } .prepare_data_calib <- function(data, method, weighting = NULL) { ## This function should not be called by the user. ## It prepares data for the calibration procedure. 
- + if (method == "wild") { - ## Checking the inputs if (!all(c("lat", "long") %in% colnames(data))) { stop("The dataset does not seem to contain the required variable(s) 'lat' and/or 'long'.") - } + } if (is.null(data$sample_value)) { stop("The dataset does not seem to contain the required variable 'sample_value'.") } if (is.null(data$site_ID)) { stop("The dataset does not seem to contain the required variable 'site_ID'.") } - + ## Preparing the inputs data$site_ID <- factor(data$site_ID) data$lat_abs <- abs(data$lat) data$lat_2 <- data$lat^2 data$long_2 <- data$long^2 data$source_ID <- as.factor(paste("new", data$site_ID, sep = "_")) - } - - else if (method == "lab") { - + } else if (method == "lab") { ## Checking the inputs if (is.null(data$sample_value)) { stop("The dataset does not seem to contain the required variable 'sample_value'.") @@ -444,15 +458,12 @@ calibfit <- function(data, if (is.null(data$source_value)) { stop("The dataset does not seem to contain the required variable 'source_value'.") } - + ## Preparing the inputs if (!is.null(data$site_ID)) { data$site_ID <- factor(data$site_ID) } - } - - else if (method == "desk" || method == "desk_inverse") { - + } else if (method == "desk" || method == "desk_inverse") { ## Checking the inputs if (is.null(data$intercept)) { stop("The dataset does not seem to contain the required variable 'intercept'.") @@ -463,13 +474,10 @@ calibfit <- function(data, if (nrow(data) > 1) { stop("The selected calibration method requires that data contains only a single row.") } - + ## Preparing the inputs ### No preparation needed - } - - else if (method == "desk_inverse") { - + } else if (method == "desk_inverse") { ## Checking the inputs if (is.null(data$intercept_se)) { stop("The dataset does not seem to contain the required variable 'intercept_se'.") @@ -486,17 +494,15 @@ calibfit <- function(data, if (is.null(data$N)) { stop("The dataset does not seem to contain the required variable 'N'.") } - + ## Preparing the inputs 
### No preparation needed - } - - else { + } else { stop("method argument not recognised") } - + data <- droplevels(data) - + if (!is.null(weighting)) { precipitations <- terra::extract(weighting, cbind(data$long, data$lat))[[1]] if (anyNA(precipitations)) { @@ -515,30 +521,43 @@ calibfit <- function(data, control_optim = list()) { ## This function should not be called by the user. ## It fits the calibration model according to the "wild" method. - + if (verbose) { print("starting calibration method 'wild'") } - + time <- system.time({ ## predict isoscape and associated prediction ## covariance matrix at animal locations - + if (verbose) { print("predicting the isoscape value in each calibration site...") } - - mean_calib <- spaMM::predict.HLfit(isofit[["mean_fit"]], - newdata = data, - variances = list(predVar = TRUE, cov = TRUE)) - + + mean_calib_obj <- .safe_and_quiet_predictions(isofit[["mean_fit"]], + newdata = data, + variances = list(predVar = TRUE, cov = TRUE) + ) + + mean_calib <- mean_calib_obj$result + + if (length(mean_calib_obj$messages) > 0) { + message("The following messages were produced by the predictions step during calibration: ") + message(mean_calib_obj$messages) + } + + if (length(mean_calib_obj$warnings) > 0) { + message("The following warnings were produced by the predictions step during calibration: ") + message(mean_calib_obj$warnings) + } + ## store the mean prediction data$mean_source_value <- c(mean_calib) - + ## Test for extrapolation during calibration issues_extrapolations <- FALSE msg <- paste("Note: extrapolation issues\nOut of your", nrow(data), "calibration samples,\n") - + ### check if spatial extrapolation occurs coord_points <- isofit$info_fit$data[, c("long", "lat")] points_contour <- grDevices::chull(coord_points) @@ -551,29 +570,29 @@ calibfit <- function(data, issues_extrapolations <- TRUE msg <- paste(msg, "*", sum(points_out), "correspond to locations outside the area covered by the measurements you used to build your 
isoscape.\n") } - + ## display message if necessary if (issues_extrapolations) { message(paste0(msg, "--> These cases correspond to extrapolation during the calibration step, which could impede the reliability of your assignments.\nIf the proportion of problematic samples is large, you should perhaps rethink the design of your isoscape and/or collect more calibration data within the expected range to avoid any problem.")) } - + ## extract the prediction covariance matrix predcov_matrix_isofit_full <- attr(mean_calib, "predVar") - + ## extract the prediction variances data$mean_predVar_source <- diag(predcov_matrix_isofit_full) - + ## reshape the prediction covariance matrix to number of unique sites firstoccurences <- match(levels(data$site_ID), data$site_ID) predcov_isofit <- predcov_matrix_isofit_full[firstoccurences, firstoccurences] rownames(predcov_isofit) <- levels(data$site_ID) colnames(predcov_isofit) <- levels(data$site_ID) - + ### fitting the calibration function if (verbose) { print("fitting the calibration function...") } - + ## Defining the calibration function objective_fn_calib <- function(param, data, @@ -599,11 +618,12 @@ calibfit <- function(data, data = data, method = lik_method ) - if (return_fit) + if (return_fit) { return(calib_fit) + } return(calib_fit$APHLs$p_v) } - + ## estimation of intercept and slope of the calibration function opt_res <- stats::optim( par = c(0, 1), @@ -614,10 +634,10 @@ calibfit <- function(data, species_rand = species_rand, lik_method = "REML" ) - + param_calibfit <- opt_res$par names(param_calibfit) <- c("intercept", "slope") - + ## fit of the calibration function calib_fit <- objective_fn_calib( param = param_calibfit, @@ -627,12 +647,12 @@ calibfit <- function(data, lik_method = "REML", return_fit = TRUE ) - + ## computing the covariance matrix of fixed effects if (verbose) { print("computing the covariance matrix of fixed effects...") } - + fixefCov_calibfit <- solve( -numDeriv::hessian( objective_fn_calib, @@ 
-643,33 +663,36 @@ calibfit <- function(data, lik_method = "ML" ) ) - + rownames(fixefCov_calibfit) <- names(param_calibfit) colnames(fixefCov_calibfit) <- names(param_calibfit) - }) ## end of system.time - + ## display time time <- round(as.numeric((time)[3])) if (verbose) { print(paste0("the calibration procedure based on ", nrow(data), " calibration samples has been completed in ", time, "s.")) } - + ## we create the spatial points for calibration points - calib_points <- .create_spatial_points(long = data$long, - lat = data$lat, - proj = "+proj=longlat +datum=WGS84") - - return(list("method" = "wild", - "species_rand" = species_rand, - "site_rand" = TRUE, - "param" = param_calibfit, - "fixefCov" = fixefCov_calibfit, - "phi" = calib_fit$phi, - "calib_fit" = calib_fit, - "iso_fit" = isofit, - "data" = data, - "sp_points" = list(calibs = calib_points))) + calib_points <- .create_spatial_points( + long = data$long, + lat = data$lat, + proj = "+proj=longlat +datum=WGS84" + ) + + return(list( + "method" = "wild", + "species_rand" = species_rand, + "site_rand" = TRUE, + "param" = param_calibfit, + "fixefCov" = fixefCov_calibfit, + "phi" = calib_fit$phi, + "calib_fit" = calib_fit, + "iso_fit" = isofit, + "data" = data, + "sp_points" = list(calibs = calib_points) + )) } .calibfit_lab <- function(data, @@ -677,13 +700,12 @@ calibfit <- function(data, verbose = interactive()) { ## This function should not be called by the user. ## It fits the calibration model according to the "lab" method. 
- + if (verbose) { print("starting calibration method 'lab'") } - + time <- system.time({ - ## determine if site_ID random term needed if (!"site_ID" %in% colnames(data)) { site_rand <- FALSE @@ -691,98 +713,103 @@ calibfit <- function(data, nb_sites <- length(unique(data$site_ID)) site_rand <- ifelse(nb_sites > 4, TRUE, FALSE) } - + ### fitting the calibration function calib_formula <- "sample_value ~ source_value" - + if (species_rand) { calib_formula <- paste(calib_formula, "+ (1|species_ID)") } - + if (site_rand) { calib_formula <- paste(calib_formula, "+ (1|site_ID)") } - + calib_fit <- spaMM::fitme( formula = stats::formula(calib_formula), data = data, - method = "REML") - + method = "REML" + ) + param_calibfit <- spaMM::fixef(calib_fit) names(param_calibfit) <- c("intercept", "slope") - + ## extracting the covariance matrix of fixed effects - + fixefCov_calibfit <- stats::vcov(calib_fit) - + rownames(fixefCov_calibfit) <- names(param_calibfit) colnames(fixefCov_calibfit) <- names(param_calibfit) - }) ## end of system.time - + ## display time time <- round(as.numeric((time)[3])) if (verbose) { print(paste0("the calibration procedure based on ", nrow(data), "calibration samples has been completed in ", time, "s.")) } - + ## we create the spatial points for calibration points if (all(c("long", "lat") %in% colnames(data))) { - calib_points <- .create_spatial_points(long = data$long, - lat = data$lat, - proj = "+proj=longlat +datum=WGS84") + calib_points <- .create_spatial_points( + long = data$long, + lat = data$lat, + proj = "+proj=longlat +datum=WGS84" + ) } else { calib_points <- NULL } - - return(list("method" = "lab", - "species_rand" = species_rand, - "site_rand" = site_rand, - "param" = param_calibfit, - "fixefCov" = fixefCov_calibfit, - "phi" = calib_fit$phi, - "calib_fit" = calib_fit, - "iso_fit" = list(), - "data" = data, - "sp_points" = list(calibs = calib_points))) + + return(list( + "method" = "lab", + "species_rand" = species_rand, + "site_rand" = 
site_rand, + "param" = param_calibfit, + "fixefCov" = fixefCov_calibfit, + "phi" = calib_fit$phi, + "calib_fit" = calib_fit, + "iso_fit" = list(), + "data" = data, + "sp_points" = list(calibs = calib_points) + )) } .calibfit_desk <- function(data, verbose = interactive()) { ## This function should not be called by the user. ## It fits the calibration model according to the "desk" method (aka "dirty"). - + message("Note: this calibration method is not recommended since it does not account for the required covariance terms needed to perform reliable assignments. See ?calibfit for details.") - + phi_calibfit <- ifelse(!is.null(data$resid_var), data$resid_var, 0) - + fixefCov_calibfit <- matrix(0, nrow = 2, ncol = 2) rownames(fixefCov_calibfit) <- c("intercept", "slope") colnames(fixefCov_calibfit) <- c("intercept", "slope") - + if (!is.null(data$intercept_se)) { fixefCov_calibfit[1, 1] <- data$intercept_se^2 } - + if (!is.null(data$slope_se)) { fixefCov_calibfit[2, 2] <- data$slope_se^2 } - + if (verbose) { print(paste("the calibration data have been loaded.")) } - - return(list("method" = "desk", - "species_rand" = FALSE, - "site_rand" = FALSE, - "param" = c(intercept = data$intercept, slope = data$slope), - "fixefCov" = fixefCov_calibfit, - "phi" = phi_calibfit, - "calib_fit" = list(), - "iso_fit" = list(), - "data" = data, - "sp_points" = NULL)) - + + return(list( + "method" = "desk", + "species_rand" = FALSE, + "site_rand" = FALSE, + "param" = c(intercept = data$intercept, slope = data$slope), + "fixefCov" = fixefCov_calibfit, + "phi" = phi_calibfit, + "calib_fit" = list(), + "iso_fit" = list(), + "data" = data, + "sp_points" = NULL + )) } @@ -790,22 +817,25 @@ calibfit <- function(data, verbose = interactive()) { ## This function should not be called by the user. ## It fits the calibration model according to the "desk" method (aka "dirty"). 
- - inv_reg <- .invert_reg(intercept = data$intercept, - slope = data$slope, - SE_I = data$intercept_se, - SE_S = data$slope_se, - phi = data$resid_var, - N = data$N, - sign_mean_Y = data$sign_mean_Y + + inv_reg <- .invert_reg( + intercept = data$intercept, + slope = data$slope, + SE_I = data$intercept_se, + SE_S = data$slope_se, + phi = data$resid_var, + N = data$N, + sign_mean_Y = data$sign_mean_Y + ) + + data_transformed <- data.frame( + intercept = inv_reg$intercept, + slope = inv_reg$slope, + intercept_se = inv_reg$SE_I, + slope_se = inv_reg$SE_S, + resid_var = inv_reg$phi ) - - data_transformed <- data.frame(intercept = inv_reg$intercept, - slope = inv_reg$slope, - intercept_se = inv_reg$SE_I, - slope_se = inv_reg$SE_S, - resid_var = inv_reg$phi) - + .calibfit_desk(data = data_transformed, verbose = verbose) } @@ -825,7 +855,7 @@ print.CALIBFIT <- function(x, ...) { cat("sample_value = intercept + slope * source_value + (1|species_ID) + (1|site_ID) + Error", "\n") } else if (!x$species_rand && x$site_rand) { cat("sample_value = intercept + slope * source_value + (1|site_ID) + Error", "\n") - } else if (x$species_rand && !x$site_rand) { + } else if (x$species_rand && !x$site_rand) { cat("sample_value = intercept + slope * source_value + (1|species_ID) + Error", "\n") } else if (!x$species_rand && !x$site_rand) { cat("sample_value = intercept + slope * source_value + Error", "\n") @@ -836,14 +866,18 @@ print.CALIBFIT <- function(x, ...) 
{ stop("method unknown") } cat("\n") - cat(paste(" intercept (+/- SE) =", .print_nice_and_round(x$param["intercept"], 2), - "+/-", .print_nice_and_round(sqrt(x$fixefCov["intercept", "intercept"]), 2)), "\n") - cat(paste(" slope (+/- SE) =", .print_nice_and_round(x$param["slope"], 2), - "+/-", .print_nice_and_round(sqrt(x$fixefCov["slope", "slope"]), 2)), "\n") + cat(paste( + " intercept (+/- SE) =", .print_nice_and_round(x$param["intercept"], 2), + "+/-", .print_nice_and_round(sqrt(x$fixefCov["intercept", "intercept"]), 2) + ), "\n") + cat(paste( + " slope (+/- SE) =", .print_nice_and_round(x$param["slope"], 2), + "+/-", .print_nice_and_round(sqrt(x$fixefCov["slope", "slope"]), 2) + ), "\n") cat("\n") cat("[for more information, use summary()]", "\n") cat("\n") - + return(invisible(NULL)) } @@ -872,4 +906,3 @@ summary.CALIBFIT <- function(object, ...) { } return(invisible(NULL)) } - diff --git a/IsoriX/R/create_aliens.R b/IsoriX/R/create_aliens.R index f409b19..ba708c6 100644 --- a/IsoriX/R/create_aliens.R +++ b/IsoriX/R/create_aliens.R @@ -17,7 +17,7 @@ #' frame containing three columns (`site_ID`, `long` and `lat`) to input the #' coordinate of the sampling site manually. #' -#' Irrespectively of how locations are chosen, a random number of observations +#' Irrespective of how locations are chosen, a random number of observations #' will be drawn, at each site, according to a uniform distribution bounded by #' the values of the argument `min_n_samples` and `max_n_samples`. #' @@ -28,84 +28,88 @@ #' organisms. The actual values is then drawn from a Gaussian distribution #' centred around such mean and a variance defined by the residual variance #' (`resid_var`) input within the list `calib_fn`. -#' -#' @param calib_fn A *list* containing the parameter values describing the +#' +#' @param calib_fn A *list* containing the parameter values describing the #' relationship between the isotope values in the environment and those in the #' simulated organisms. 
This list must contain three parameters: the #' intercept, the slope, and the residual variance. -#' +#' #' @param isoscape The output of the function [isoscape] -#' -#' @param coordinates An optional *data.frame* with columns `site_ID`, +#' +#' @param coordinates An optional *data.frame* with columns `site_ID`, #' `long` and `lat` -#' +#' #' @param raster A *SpatRaster* containing an elevation raster -#' +#' #' @param n_sites The number of sites from which the simulated organisms #' originate (*integer*) -#' +#' #' @param min_n_samples The minimal number of observations (*integer*) per #' site -#' +#' #' @param max_n_samples The maximal number of observations (*integer*) per #' site -#' +#' #' @return This functions returns a *data.frame* (see example for column #' names) -#' +#' #' @seealso [calibfit] for a calibration based on simulated data -#' +#' #' [isofind] for an assignment based on simulated data -#' +#' #' [IsoriX] for the complete work-flow of our package #' @keywords simulate simulation #' @examples -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 30) { -#' -#' ## We fit the models for Germany -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' GermanFit <- isofit(data = GNIPDataDEagg) -#' -#' ## We build the isoscapes -#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -#' -#' ## We create a simulated dataset with 25 sites and 5 observations per site -#' Aliens <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), -#' isoscape = GermanScape, -#' raster = ElevRasterDE, -#' n_sites = 25, -#' min_n_samples = 5, -#' max_n_samples = 5) -#' -#' ## We display the simulated dataset -#' Aliens -#' -#' ## We plot the relationship between the environmental isotope values -#' ## and those from the simulated organisms -#' plot(sample_value ~ source_value, data = Aliens, ylab = "Tissue", xlab = "Environment") -#' abline(3, 0.5, col = "blue") ## the true relationship -#' -#' ## We create a simulated dataset with 2 sites imputing coordinates manually -#' Aliens2 <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), -#' isoscape = GermanScape, -#' coordinates = data.frame(site_ID = c("Berlin", "Bielefeld"), -#' long = c(13.52134, 8.49914), -#' lat = c(52.50598, 52.03485)), -#' raster = ElevRasterDE, -#' min_n_samples = 5, -#' max_n_samples = 5) -#' -#' Aliens2 #' +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## We fit the models for Germany +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' GermanFit <- isofit(data = GNIPDataDEagg) +#' +#' ## We build the isoscapes +#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) +#' +#' ## We create a simulated dataset with 25 sites and 5 observations per site +#' Aliens <- create_aliens( +#' calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), +#' isoscape = GermanScape, +#' raster = ElevRasterDE, +#' n_sites = 25, +#' min_n_samples = 5, +#' max_n_samples = 5 +#' ) +#' +#' ## We display the simulated dataset +#' Aliens 
+#' +#' ## We plot the relationship between the environmental isotope values +#' ## and those from the simulated organisms +#' plot(sample_value ~ source_value, data = Aliens, ylab = "Tissue", xlab = "Environment") +#' abline(3, 0.5, col = "blue") ## the true relationship +#' +#' ## We create a simulated dataset with 2 sites imputing coordinates manually +#' Aliens2 <- create_aliens( +#' calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), +#' isoscape = GermanScape, +#' coordinates = data.frame( +#' site_ID = c("Berlin", "Bielefeld"), +#' long = c(13.52134, 8.49914), +#' lat = c(52.50598, 52.03485) +#' ), +#' raster = ElevRasterDE, +#' min_n_samples = 5, +#' max_n_samples = 5 +#' ) +#' +#' Aliens2 #' } -#' +#' #' @export create_aliens <- function(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), isoscape = NULL, @@ -114,16 +118,15 @@ create_aliens <- function(calib_fn = list(intercept = 3, slope = 0.5, resid_var n_sites = NA, min_n_samples = 1, max_n_samples = 10) { - ## Complete the arguments - .complete_args(create_aliens) - + .complete_args(create_aliens) + ## Choose location for the aliens if (length(coordinates) == 1 && is.na(coordinates)) { LocationData <- data.frame(site_ID = sample(1:terra::ncell(isoscape$isoscape$mean), n_sites, replace = FALSE)) xy <- terra::xyFromCell(isoscape$isoscape$mean, LocationData$site_ID) LocationData$long <- xy[, "x"] - LocationData$lat <- xy[, "y"] + LocationData$lat <- xy[, "y"] } else { if (!all(c("site_ID", "long", "lat") %in% colnames(coordinates))) { stop("the argument coordinates must contain the columns 'site_ID', 'long' and 'lat'") @@ -135,25 +138,26 @@ create_aliens <- function(calib_fn = list(intercept = 3, slope = 0.5, resid_var xy <- coordinates[, c("long", "lat")] n_sites <- nrow(coordinates) } - LocationData$elev = terra::extract(x = raster, y = xy)[[1]] + LocationData$elev <- terra::extract(x = raster, y = xy)[[1]] LocationData$n_samples <- round(stats::runif(n = n_sites, min = min_n_samples, 
max = max_n_samples)) - + ## Predict environmental values at the locations LocationData$source_value <- terra::extract(isoscape$isoscapes$mean, xy)[[1]] - + ## Replicate the dataset per animal - AlienData <- LocationData[rep(1:nrow(LocationData), times = LocationData$n_samples), ] - AlienData$sample_ID <- factor(paste("Alien", 1:nrow(AlienData), sep = "_")) - + AlienData <- LocationData[rep(seq_len(nrow(LocationData)), times = LocationData$n_samples), ] + AlienData$sample_ID <- factor(paste("Alien", seq_len(nrow(AlienData)), sep = "_")) + ## Turning site_ID into a factor AlienData$site_ID <- factor(AlienData$site_ID) - + ## Predict the tissue value for each animal - AlienData$sample_value <- stats::rnorm(n = nrow(AlienData), - mean = calib_fn$intercept + AlienData$source_value * calib_fn$slope, - sd = sqrt(calib_fn$resid_var)) + AlienData$sample_value <- stats::rnorm( + n = nrow(AlienData), + mean = calib_fn$intercept + AlienData$source_value * calib_fn$slope, + sd = sqrt(calib_fn$resid_var) + ) ## Cleanup and return rownames(AlienData) <- NULL return(AlienData[, c("site_ID", "long", "lat", "elev", "sample_ID", "source_value", "sample_value")]) } - diff --git a/IsoriX/R/defunct.R b/IsoriX/R/defunct.R index 74dde93..0b7bd39 100644 --- a/IsoriX/R/defunct.R +++ b/IsoriX/R/defunct.R @@ -1,9 +1,9 @@ #' Defunct and deprecated functions -#' +#' #' The function you asked help for has been defunct (i.e. it does not longer #' exists) or deprecated (i.e. it will disappear soon). A new function with a #' different name is surely doing the old job. -#' +#' #' @param ... 
The call of the defunct or deprecated function #' @aliases Calibfit GetElev Isofit Isorix Isoscape Isosim QueryGNIP queryGNIP prepdata prepelev prepiso RElevate relevate #' @name IsoriX-defunct diff --git a/IsoriX/R/getelev.R b/IsoriX/R/getelev.R index 4762605..aa0abbd 100644 --- a/IsoriX/R/getelev.R +++ b/IsoriX/R/getelev.R @@ -1,5 +1,5 @@ #' Download an elevation raster from internet -#' +#' #' The function `getelev` downloads an elevation raster from internet. It #' is a wrapper that 1) calls the function [elevatr::get_elev_raster] to #' download the data and 2) saves the downloaded raster on the hard drive (so @@ -14,21 +14,21 @@ #' increasing the value of the argument `z`. You can also restrict the area #' to be downloaded using the arguments `long_min`, `long_max`, `lat_min` & #' `lat_max`. -#' +#' #' Note that when using [prepraster] you will be able to reduce the resolution #' and restrict the boundaries of this elevation raster, but you won't be able #' to increase the resolution or expend the boundaries. As a consequence, it is #' probably a good idea to overshoot a little when using `getelev` and #' download data at a resolution slightly higher than you need and for a extent #' larger than your data. -#' +#' #' You can customise further what you download by using other parameters of #' [elevatr::get_elev_raster] (via the elipsis `...`). -#' +#' #' Please refer to the documentation of #' [elevatr::get_elev_raster] for information on the sources and follows link in #' there to know how to cite them. 
-#' +#' #' @inheritParams prepsources #' @param file A *string* indicating where to store the file on the hard #' drive (default = `~/elevation_world_z5.tif`) @@ -51,11 +51,11 @@ #' #' @return This function returns the full path where the file has been stored #' @examples -#' +#' #' ## To download the high resolution #' ## raster at the default location, just type: #' ## getelev() -#' +#' #' @export getelev <- function(file = "~/elevation_world_z5.tif", z = 5, @@ -68,20 +68,18 @@ getelev <- function(file = "~/elevation_world_z5.tif", overwrite = FALSE, Ncpu = getOption_IsoriX("Ncpu"), verbose = interactive(), - ... -) { - + ...) { ## Checking that elevatr is installed if (!requireNamespace("elevatr", quietly = TRUE)) { stop("You must install the package elevatr for this function to run: `install.packages('elevatr')`") } - + ## Turning path into canonical form ## (this avoids the problem of using the wrong slashes and so on) file <- normalizePath(file, mustWork = FALSE) - + path <- dirname(file) - + if (path == ".") { path <- getwd() } else if (!dir.exists(path)) { @@ -91,13 +89,12 @@ getelev <- function(file = "~/elevation_world_z5.tif", } dir.create(path, recursive = TRUE) } - + ## Applying margin_pct if (margin_pct != 0) { - - margin_long_extra <- (long_max - long_min) * margin_pct/100 - margin_lat_extra <- (lat_max - lat_min) * margin_pct/100 - + margin_long_extra <- (long_max - long_min) * margin_pct / 100 + margin_lat_extra <- (lat_max - lat_min) * margin_pct / 100 + if (long_min > -180) { long_min <- max(c(-180, long_min - margin_long_extra)) } @@ -111,40 +108,43 @@ getelev <- function(file = "~/elevation_world_z5.tif", lat_max <- min(c(90, lat_max + margin_lat_extra)) } } - + ## Conditional file download - if (file.exists(file) & !overwrite) { - message(paste("the file", basename(file), "is already present in", path, - "so it won't be downloaded again unless you set the argument overwrite to TRUE\n" - ) - ) + if (file.exists(file) && !overwrite) { + 
message(paste( + "the file", basename(file), "is already present in", path, + "so it won't be downloaded again unless you set the argument overwrite to TRUE\n" + )) } else { - if (verbose) print("Downloading and formating the elevation raster... (be patient)") - elev <- elevatr::get_elev_raster(locations = data.frame(x = c(long_min, long_max), - y = c(lat_min, lat_max)), - z = z, - prj = "+proj=longlat +datum=WGS84 +no_defs", - clip = "bbox", - override_size_check = override_size_check, - ncpu = Ncpu, - verbose = verbose, - ...) - + elev <- elevatr::get_elev_raster( + locations = data.frame( + x = c(long_min, long_max), + y = c(lat_min, lat_max) + ), + z = z, + prj = "+proj=longlat +datum=WGS84 +no_defs", + clip = "bbox", + override_size_check = override_size_check, + ncpu = Ncpu, + verbose = verbose, + ... + ) + if (verbose) print("Writing the elevation raster on the disk...") terra::writeRaster(elev, filename = file, overwrite = overwrite) if (verbose) print("Done.") } - + message("you can load your elevation raster as follows:") message(paste0("elev_raster <- terra::rast('", file, "')")) - + return(invisible(file)) } #' Download rasters of monthly precipitation from internet -#' +#' #' The function `getprecip` allows for the download of rasters of monthly #' precipitation from internet. It downloads the "precipitation (mm) WorldClim #' Version 2.1" at a spatial resolution of 30 seconds (~1 km2). After download, @@ -154,13 +154,13 @@ getelev <- function(file = "~/elevation_world_z5.tif", #' [prepcipitate]. It can then be used to predict annual averages #' precipitation weighted isoscapes with the function #' [isomultiscape]. -#' -#' In the argument "path" is not provided, the file will be stored in the -#' current working directory. The functions can create new directories, so you +#' +#' In the argument "path" is not provided, the file will be stored in the +#' current working directory. 
The functions can create new directories, so you #' can also indicate a new path. The integrity of the elevation raster is tested #' after a call to `getprecip`. In case of corruption, try downloading the #' file again, specifying overwrite = TRUE to overwrite the corrupted file. -#' +#' #' @inheritParams getelev #' @param path A *string* indicating where to store the file on the hard #' drive (without the file name!). Default = current directory. @@ -170,24 +170,22 @@ getelev <- function(file = "~/elevation_world_z5.tif", #' session and `FALSE` otherwise. If a *numeric* is provided instead, #' additional information about the download will be provided if the number is #' greater than 1. -#' +#' #' @return This function returns the path of the folder where the files have #' been stored #' #' @source \url{https://worldclim.org/data/worldclim21.html} #' @examples -#' +#' #' ## To download the monthly precipitation #' ## in your current working #' ## directory, just type: -#' ## getprecip() +#' ## getprecip(path = "~/Downloads/") #' ## Mind that the file weights ca. 1GB! #' @export getprecip <- function(path = NULL, overwrite = FALSE, - verbose = interactive() -) { - + verbose = interactive()) { ## check options old_opts <- options() if (options()$timeout == 60) { @@ -197,44 +195,45 @@ getprecip <- function(path = NULL, message("You are using a custom value of timeout lower than 600s. This could be unsufficient for downloading the large file. If the download crashes as a result, please call `options(timeout = XX)` with XX the number of seconds you want to allow R waiting for the download to be completed before crashing. 
After this, rerun `getprecip()`.") } on.exit(options(old_opts)) - + ## Use current directory if path is missing if (is.null(path)) { path <- getwd() } - + ## Normalise path the remove last slash path <- base::normalizePath(path, mustWork = FALSE) - + ## Define web address and file name address_precip <- "https://biogeo.ucdavis.edu/data/worldclim/v2.1/base/wc2.1_30s_prec.zip" filename_precip <- "wc2.1_30s_prec.zip" - + ## Define md5sum ## (created with tools::md5sum("wc2.1_30s_prec.zip")) md5sum_precip <- "cc100350d034883c9e925c903fb3c7c3" - + ## Download and check file - path_to_zip <- downloadfile(address = address_precip, - filename = filename_precip, - path = path, - overwrite = overwrite, - md5sum = md5sum_precip, - verbose = verbose + path_to_zip <- downloadfile( + address = address_precip, + filename = filename_precip, + path = path, + overwrite = overwrite, + md5sum = md5sum_precip, + verbose = verbose ) - + ## Unzip the file if (verbose > 0) { print("unzipping in progress...", quote = FALSE) } outpath <- paste0(path, "/wc2.1_30s_prec") utils::unzip(path_to_zip, exdir = outpath) - + if (verbose > 0) { print("unzipping done!", quote = FALSE) print(paste("The files can be found in the folder", outpath), quote = FALSE) } - + return(invisible(outpath)) } @@ -270,29 +269,27 @@ getprecip <- function(path = NULL, #' @export #' #' @seealso [getelev()], [getprecip()] -#' -#' +#' +#' downloadfile <- function(address = NULL, filename = NULL, path = NULL, - overwrite = FALSE, md5sum = NULL, verbose = interactive() -) { - + overwrite = FALSE, md5sum = NULL, verbose = interactive()) { if (verbose > 0) { print(paste("the function attempts to download", filename, "from internet"), quote = FALSE) } - + ## Change internet options to display more information opt_ori <- options()$internet.info if (verbose > 1) options(internet.info = 1) - + ## Use current directory if path is missing if (is.null(path)) { path <- getwd() } - + ## Turning path into canonical form ## (this avoids 
the problem of having terminal slash or not) path <- normalizePath(path, mustWork = FALSE) - + ## Create directory if missing if (!dir.exists(path)) { if (verbose > 0) { @@ -300,19 +297,19 @@ downloadfile <- function(address = NULL, filename = NULL, path = NULL, } dir.create(path, recursive = TRUE) } - + ## Conditional file download complete_path <- paste(path, filename, sep = "/") - if (file.exists(complete_path) & !overwrite) { - message(paste("the file", filename, "is already present in", path, - "so it won't be downloaded again unless you set the argument overwrite to TRUE" - ) - ) + if (file.exists(complete_path) && !overwrite) { + message(paste( + "the file", filename, "is already present in", path, + "so it won't be downloaded again unless you set the argument overwrite to TRUE" + )) } else { time <- system.time(utils::download.file(address, destfile = complete_path, mode = "wb")) message(paste0("The download lasted ", time[["elapsed"]], "s")) } - + ## Checking MD5sum if (!is.null(md5sum)) { if (tools::md5sum(complete_path) == md5sum) { @@ -321,15 +318,14 @@ downloadfile <- function(address = NULL, filename = NULL, path = NULL, warning("the file seems to be corructed (md5sums do not match). Try to download it again setting the argument overwrite to TRUE.") } } - + ## Display outcome if (verbose > 0) { print(paste("the file", filename, "is stored in the folder", path), quote = FALSE) } - + ## Restore original internet options options(internet.info = opt_ori) - + return(invisible(complete_path)) } - diff --git a/IsoriX/R/isofind.R b/IsoriX/R/isofind.R index 925819d..6d37dcd 100644 --- a/IsoriX/R/isofind.R +++ b/IsoriX/R/isofind.R @@ -1,7 +1,7 @@ #' Infer spatial origins -#' +#' #' This function performs the assignment of samples of unknown origins. -#' +#' #' An assignment is a comparison, for a given organism, of the predicted #' isotopic source value at its location of origin and the predicted isotopic #' source value at each location of the `isoscape`. 
The difference between @@ -19,9 +19,9 @@ #' statistical details about this procedure as well as a discussion of which #' uncertainties are captured and which are not, please refer to Courtiol et al. #' 2019. -#' +#' #' **Details on parameters:** -#' +#' #' - *neglect_covPredCalib*: as long as the calibration method used in #' [calibfit] is "wild", a covariance is expected between the #' uncertainty of predictions from the isoscape mean fit and the uncertainty in @@ -33,13 +33,13 @@ #' nonetheless recommend to set `neglect_covPredCalib` to `FALSE` in #' your final analysis. If the calibration method used in [calibfit] #' is not "wild", this parameter has no effect. -#' +#' #' - *mask*: a mask can be used so to remove all values falling in the mask. #' This can be useful for performing for example assignments on lands only and #' discard anything falling in large bodies of water (see example). By default #' our [OceanMask] is considered. Setting `mask` to NULL allows #' to prevent this automatic behaviour. -#' +#' #' @aliases isofind print.ISOFIND summary.ISOFIND #' @param data A *dataframe* containing the assignment data (see note below) #' @param isoscape The output of the function [isoscape] @@ -71,141 +71,156 @@ #' @references Courtiol A, Rousset F, Rohwäder M, Soto DX, Lehnert L, Voigt CC, Hobson KA, Wassenaar LI, Kramer-Schadt S (2019). Isoscape #' computation and inference of spatial origins with mixed models using the R package IsoriX. In Hobson KA, Wassenaar LI (eds.), #' Tracking Animal Migration with Stable Isotopes, second edition. Academic Press, London. -#' +#' #' Fisher, R.A. (1925). Statistical Methods for Research Workers. #' Oliver and Boyd (Edinburgh). ISBN 0-05-002170-2. #' @keywords models regression #' @examples -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. 
XX seconds to run #' ## (so don't write XX but put a number instead!) -#' -#' if(getOption_IsoriX("example_maxtime") > 200) { -#' -#' ## We fit the models for Germany -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' GermanFit <- isofit(data = GNIPDataDEagg, -#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -#' -#' -#' ## We build the isoscape -#' GermanScape <- isoscape(raster = ElevRasterDE, -#' isofit = GermanFit) -#' -#' -#' ## We fit the calibration model -#' CalibAlien <- calibfit(data = CalibDataAlien, -#' isofit = GermanFit) -#' -#' ## We perform the assignment on land only -#' AssignmentDry <- isofind(data = AssignDataAlien, -#' isoscape = GermanScape, -#' calibfit = CalibAlien) -#' -#' ## perform the assignment on land and water -#' Assignment <- isofind(data = AssignDataAlien, -#' isoscape = GermanScape, -#' calibfit = CalibAlien, -#' mask = NULL) -#' -#' ## We plot the group assignment -#' plot(Assignment, who = "group", mask = list(mask = NULL)) -#' -#' plot(AssignmentDry, who = "group", mask = list(mask = NULL)) -#' -#' ## We plot the assignment for the 8 first samples -#' plot(AssignmentDry, who = 1:8, -#' sources = list(draw = FALSE), -#' calibs = list(draw = FALSE)) -#' -#' ## We plot the assignment for the sample "Alien_10" -#' plot(AssignmentDry, who = "Alien_10") -#' -#' -#' ### Other example without calibration: -#' ### We will try to assign a weather station -#' ### in the water isoscape -#' -#' ## We create the assignment data taking -#' ## GARMISCH-PARTENKIRCHEN as the station to assign -#' GPIso <- GNIPDataDEagg[GNIPDataDEagg$source_ID == "GARMISCH-PARTENKIRCHEN", "mean_source_value"] -#' AssignDataGP <- data.frame(sample_value = GPIso, -#' sample_ID = "GARMISCH-PARTENKIRCHEN") -#' -#' ## We perform the assignment -#' AssignedGP <- isofind(data = AssignDataGP, -#' isoscape = GermanScape, -#' calibfit = NULL) -#' ## We plot the assignment and -#' ## show where the station really is (using lattice) -#' plot(AssignedGP) + -#' 
xyplot(47.48 ~ 11.06, -#' panel = panel.points, -#' cex = 5, pch = 13, lwd = 2, col = "black") -#' -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 200) { +#' ## We fit the models for Germany +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' GermanFit <- isofit( +#' data = GNIPDataDEagg, +#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE) +#' ) +#' +#' +#' ## We build the isoscape +#' GermanScape <- isoscape( +#' raster = ElevRasterDE, +#' isofit = GermanFit +#' ) +#' +#' +#' ## We fit the calibration model +#' CalibAlien <- calibfit( +#' data = CalibDataAlien, +#' isofit = GermanFit +#' ) +#' +#' ## We perform the assignment on land only +#' AssignmentDry <- isofind( +#' data = AssignDataAlien, +#' isoscape = GermanScape, +#' calibfit = CalibAlien +#' ) +#' +#' ## perform the assignment on land and water +#' Assignment <- isofind( +#' data = AssignDataAlien, +#' isoscape = GermanScape, +#' calibfit = CalibAlien, +#' mask = NULL +#' ) +#' +#' ## We plot the group assignment +#' plot(Assignment, who = "group", mask = list(mask = NULL)) +#' +#' plot(AssignmentDry, who = "group", mask = list(mask = NULL)) +#' +#' ## We plot the assignment for the 8 first samples +#' plot(AssignmentDry, +#' who = 1:8, +#' sources = list(draw = FALSE), +#' calibs = list(draw = FALSE) +#' ) +#' +#' ## We plot the assignment for the sample "Alien_10" +#' plot(AssignmentDry, who = "Alien_10") +#' +#' +#' ### Other example without calibration: +#' ### We will try to assign a weather station +#' ### in the water isoscape +#' +#' ## We create the assignment data taking +#' ## GARMISCH-PARTENKIRCHEN as the station to assign +#' GPIso <- GNIPDataDEagg[GNIPDataDEagg$source_ID == "GARMISCH-PARTENKIRCHEN", "mean_source_value"] +#' AssignDataGP <- data.frame( +#' sample_value = GPIso, +#' sample_ID = "GARMISCH-PARTENKIRCHEN" +#' ) +#' +#' ## We perform the assignment +#' AssignedGP <- isofind( +#' data = AssignDataGP, +#' isoscape = GermanScape, +#' calibfit = NULL +#' ) +#' ## We 
plot the assignment and +#' ## show where the station really is (using lattice) +#' plot(AssignedGP) + +#' xyplot(47.48 ~ 11.06, +#' panel = panel.points, +#' cex = 5, pch = 13, lwd = 2, col = "black" +#' ) #' } -#' +#' #' @export isofind <- function(data, isoscape, calibfit = NULL, mask = NA, neglect_covPredCalib = TRUE, - verbose = interactive() - ) { - + verbose = interactive()) { ### WE COMPUTE THE TEST STATISTIC if (verbose) { print("computing the test statistic and its variance...") } - + ### check for calibration data if (is.null(calibfit)) { warning( -"The assignment is computed directly on the isoscape -without using a calibration! This means that IsoriX -considers that you directly fitted the isoscape on + "The assignment is computed directly on the isoscape +without using a calibration! This means that IsoriX +considers that you directly fitted the isoscape on the same material as the material you are trying to assign. If this is not the case, rerun isofind() by providing a calibration object to the argument -calibfit!") +calibfit!" 
+ ) } ## importing ocean if missing if (!is.null(mask) && !inherits(mask, "SpatVector") && is.na(mask)) { mask <- terra::readRDS(system.file("extdata/OceanMask.rds", package = "IsoriX")) } - + original_names <- as.character(data$sample_ID) names_layers <- gsub(" ", "_", original_names) names_layers <- gsub("-", ".", names_layers) - + if (any(names_layers != original_names)) { warning("Your sample_ID could not be used to name rasters (you may have used numbers, symbols or punctuations that is messing with the package terra), so they have been slightly modified by this package.") } - + time <- system.time({ if (!is.null(calibfit)) { - ## we predict the isotopic value at origin location + ## we predict the isotopic value at origin location data$mean_origin <- - (data$sample_value - calibfit$param["intercept"])/calibfit$param["slope"] + (data$sample_value - calibfit$param["intercept"]) / calibfit$param["slope"] ## we create individual rasters containing the test statistics - list_stat_layers <- sapply(1:nrow(data), ## ToDo: change that so it can use HD for heavy layers - function(i) { - data$mean_origin[i] - isoscape$isoscapes$mean - } - ) + list_stat_layers <- sapply( + seq_len(nrow(data)), ## ToDo: change that so it can use HD for heavy layers + function(i) { + data$mean_origin[i] - isoscape$isoscapes$mean + } + ) } else { ## we create individual rasters containing the test statistics - list_stat_layers <- sapply(1:nrow(data), - function(i) { - data$sample_value[i] - isoscape$isoscapes$mean - } - ) + list_stat_layers <- sapply( + seq_len(nrow(data)), + function(i) { + data$sample_value[i] - isoscape$isoscapes$mean + } + ) } names(list_stat_layers) <- names_layers stat_brick <- terra::rast(list_stat_layers) @@ -218,84 +233,83 @@ calibfit!") warning("Your sample_ID could not be used to name rasters (you may have used numbers, symbols or punctuations that is messing with the package terra), so they have been slightly modified by this package.") names_layers <- 
names(stat_brick) ## trick to track the good names as they can change during stacking (if numbers) } - + ### WE COMPUTE THE VARIANCE OF THE TEST if (!is.null(calibfit)) { - ## term 1 in eq. 9.18 from Courtiol et al. 2019 - var_term1 <- sapply(1:nrow(data), function(i) isoscape$isoscapes$mean_predVar) - + var_term1 <- sapply(seq_len(nrow(data)), function(i) isoscape$isoscapes$mean_predVar) + ## term 2 in eq. 9.18 from Courtiol et al. 2019 - var_term2 <- calibfit$phi/calibfit$param[["slope"]]^2 - + var_term2 <- calibfit$phi / calibfit$param[["slope"]]^2 + ## term 3 in eq. 9.18 from Courtiol et al. 2019 X <- cbind(1, data$mean_origin) fixedVar <- rowSums(X * (X %*% calibfit$fixefCov)) ## = diag(X %*% calibfit$fixefCov %*% t(X)) - var_term3 <- fixedVar/calibfit$param[["slope"]]^2 - + var_term3 <- fixedVar / calibfit$param[["slope"]]^2 + ## term 4 in eq. 9.18 from Courtiol et al. 2019 if (calibfit$method == "wild" && !neglect_covPredCalib) { # Create design matrix for fixed effects # note: the one stored in calibfit is defective given the use of offsets to fit the calibfit model X.pv <- cbind(1, calibfit$calib_fit$data$mean_source_value) - + X_ginv <- spaMM::get_matrix(calibfit$calib_fit, which = "fixef_left_ginv", X.pv = X.pv) # with the non-default X.pv; dimensions are 2*(number of calibration locations) ## which = "fixef_left_ginv" is not yet documented in spaMM ## => this call replaces: # phi <- spaMM::residVar(calibfit$calib_fit) # X_ginv <- tcrossprod(solve(spaMM:::.ZtWZwrapper(X.pv, 1/phi)), spaMM:::.Dvec_times_m_Matrix(1/phi, X.pv)) - + fix_X_ZAC.calib_positions <- spaMM::preprocess_fix_corr(calibfit$iso_fit$mean_fit, fixdata = calibfit$calib_fit$data) covmat <- spaMM::get_predCov_var_fix(calibfit$iso_fit$mean_fit, - newdata = attr(isoscape, "xs"), - fix_X_ZAC.object = fix_X_ZAC.calib_positions) - + newdata = attr(isoscape, "xs"), + fix_X_ZAC.object = fix_X_ZAC.calib_positions + ) + # account for the \beta factor in eq.19 covmat_scaled <- 
-calibfit$param[["slope"]] * covmat - + # matrix of row vectors of errors of the coefficients (eps_alpha, eps_beta) eps_abs <- tcrossprod(covmat_scaled, X_ginv) # dimensions: ( # of putative origins ) * 2 hat_delta_o <- terra::extract(isoscape$isoscapes$mean, attr(isoscape, "xs")[, c("long", "lat")]) - + # adding all components of term 4 - var_term4_vec <- eps_abs[, 1L] + eps_abs[, 2L]*hat_delta_o + var_term4_vec <- eps_abs[, 1L] + eps_abs[, 2L] * hat_delta_o } else { var_term4_vec <- 0 } - + # format as raster for coordinates to match - var_term4 <- .create_raster(long = terra::crds(isoscape$isoscapes)[, "x"], - lat = terra::crds(isoscape$isoscapes)[, "y"], - values = var_term4_vec, - proj = "+proj=longlat +datum=WGS84" - ) - + var_term4 <- .create_raster( + long = terra::crds(isoscape$isoscapes, na.rm = FALSE)[, "x"], + lat = terra::crds(isoscape$isoscapes, na.rm = FALSE)[, "y"], + values = var_term4_vec, + proj = "+proj=longlat +datum=WGS84" + ) + ## we create individual rasters containing the variance of the test statistics ## by summing all the terms - list_varstat_layers <- sapply(1:nrow(data), function(i) var_term1[[i]] + var_term2 + var_term3[i] + var_term4) - + list_varstat_layers <- sapply(seq_len(nrow(data)), function(i) var_term1[[i]] + var_term2 + var_term3[i] + var_term4) } else { ## end of if (!is.null(calibfit)) - + ## if no calibration, the variance of the test statistic is the response variance - list_varstat_layers <- sapply(1:nrow(data), function(i) isoscape$isoscapes$mean_respVar) - + list_varstat_layers <- sapply(seq_len(nrow(data)), function(i) isoscape$isoscapes$mean_respVar) } - if (exists("var_term1")) rm(var_term2) + if (exists("var_term1")) rm(var_term2) if (exists("var_term2")) rm(var_term2) if (exists("var_term3")) rm(var_term3) if (exists("var_term4")) rm(var_term4) - + names(list_varstat_layers) <- names_layers varstat_brick <- terra::rast(list_varstat_layers) - + if (terra::nlyr(varstat_brick) == 1) { names(varstat_brick) <- 
names_layers ## workaround since terra does not consider names if only 1 layer } - + rm(list_varstat_layers) - + ### WE COMPUTE THE INDIVIDUAL LOG P-VALUE SURFACES if (verbose) { print("running the assignment test...") @@ -303,21 +317,21 @@ calibfit!") ## we create individual rasters containing the p-values of the test list_logpv_layers <- sapply(names_layers, function(sample_ID) { - logpv_raster <- terra::rast(varstat_brick[[sample_ID]]) - terra::values(logpv_raster) <- .assign_test(terra::values(stat_brick[[sample_ID]]), terra::values(varstat_brick[[sample_ID]])) - return(logpv_raster) - }) - + logpv_raster <- terra::rast(varstat_brick[[sample_ID]]) + terra::values(logpv_raster) <- .assign_test(terra::values(stat_brick[[sample_ID]]), terra::values(varstat_brick[[sample_ID]])) + return(logpv_raster) + }) + ## we store the list as a brick logpv_brick <- terra::rast(list_logpv_layers) names(list_logpv_layers) <- names_layers - + rm(list_logpv_layers) - + if (terra::nlyr(logpv_brick) == 1) { names(logpv_brick) <- names_layers ## workaround since terra does not consider names if only 1 layer } - + ### WE COMBINE SAMPLE SURFACES USING FISHER'S METHOD if (verbose) { print("combining assignments across samples...") @@ -327,7 +341,9 @@ calibfit!") } else { group_pv <- .Fisher_method(logpv_brick) } - }) ## end of system.time + ## where isoscape values are missing, pv is NA + group_pv[is.na(isoscape$isoscapes$mean)] <- NA_real_ + }) ## end of system.time ## display time time <- round(as.numeric((time)[3])) @@ -341,7 +357,7 @@ calibfit!") } pv_brick <- exp(logpv_brick) rm(logpv_brick) - names(pv_brick) <- names_layers ## we restore the names as they are not kept when computing + names(pv_brick) <- names_layers ## we restore the names as they are not kept when computing ## replacing values by zeros if they fall in the mask (e.g. 
in water) if (!is.null(mask)) { @@ -352,24 +368,25 @@ calibfit!") ## turn mask into raster with NA inside polygons raster_mask <- is.na(terra::rasterize(mask, stat_brick)) - ## multiplying rasters by the raster_mask - stat_brick <- stat_brick*raster_mask + ## multiplying rasters by the raster_mask + stat_brick <- stat_brick * raster_mask names(stat_brick) <- names_layers ## we restore the names as they are not kept when computing - varstat_brick <- varstat_brick*raster_mask + varstat_brick <- varstat_brick * raster_mask names(varstat_brick) <- names_layers ## we restore the names as they are not kept when computing - pv_brick <- pv_brick*raster_mask + pv_brick <- pv_brick * raster_mask names(pv_brick) <- names_layers ## we restore the names as they are not kept when computing - + group_pv <- group_pv * raster_mask } ### spatial points - if (!is.null(data$lat) & !is.null(data$long)) { - assigns <- .create_spatial_points(long = data$long, - lat = data$lat, - proj = "+proj=longlat +datum=WGS84" + if (!is.null(data$lat) && !is.null(data$long)) { + assigns <- .create_spatial_points( + long = data$long, + lat = data$lat, + proj = "+proj=longlat +datum=WGS84" ) } else { assigns <- NULL @@ -381,29 +398,32 @@ calibfit!") calibs <- calibfit$sp_points$calibs } - out <- list(sample = list("stat" = stat_brick, - "stat_var" = varstat_brick, - "pv" = pv_brick - ), - group = list("pv" = group_pv), - sp_points = list("sources" = isoscape$sp_points$sources, - "calibs" = calibs, - "assigns" = assigns - ) - ) + out <- list( + sample = list( + "stat" = stat_brick, + "stat_var" = varstat_brick, + "pv" = pv_brick + ), + group = list("pv" = group_pv), + sp_points = list( + "sources" = isoscape$sp_points$sources, + "calibs" = calibs, + "assigns" = assigns + ) + ) class(out) <- c("ISOFIND", "list") - + if (verbose) { print("done!") } - + return(out) } .assign_test <- function(stats, vars, log_scale = TRUE) { - pv <- 2*(1 - stats::pnorm(abs(stats), mean = 0, sd = sqrt(vars))) + pv <- 2 * (1 
- stats::pnorm(abs(stats), mean = 0, sd = sqrt(vars))) if (log_scale) { pv <- log(pv) } @@ -415,8 +435,8 @@ calibfit!") if (length(logpv) == 1) { return(exp(logpv)) } - Fisher_stat <- -2*sum(logpv, na.rm = TRUE) - df <- 2*length(logpv[!is.na(logpv)]) + Fisher_stat <- -2 * sum(logpv, na.rm = TRUE) + df <- 2 * length(logpv[!is.na(logpv)]) pv <- stats::pchisq(q = Fisher_stat, df = df, lower.tail = FALSE) return(pv) } @@ -435,7 +455,6 @@ summary.ISOFIND <- function(object, ...) { cat(paste("######### assignment raster(s): '", i, "'"), "\n") print(object[[i]]) cat("\n") - } + } return(invisible(NULL)) } - diff --git a/IsoriX/R/isofit.R b/IsoriX/R/isofit.R index f43d9c9..7b4f0fb 100644 --- a/IsoriX/R/isofit.R +++ b/IsoriX/R/isofit.R @@ -139,7 +139,7 @@ #' disp_fit object: mean_fit is not fitted independently from disp_fit. #' #' For all methods, fixed effects are being estimated by Maximum Likelihood -#' (ML) and dispersion parameters (i.e. random effects and Matern correlation +#' (ML) and dispersion parameters (i.e. random effects and Matérn correlation #' parameters) are estimated by Restricted Maximum Likelihood (REML). Using #' REML provides more accurate prediction intervals but impedes the accuracy #' of Likelihood Ratio Tests (LRT). Our choice for REML was motivated by the @@ -167,7 +167,7 @@ #' #' @references Courtiol, A., Rousset, F. (2017). Modelling isoscapes using mixed #' models. \url{https://www.biorxiv.org/content/10.1101/207662v1} -#' +#' #' Courtiol A, Rousset F, Rohwäder M, Soto DX, Lehnert L, Voigt CC, Hobson KA, Wassenaar LI, Kramer-Schadt S (2019). Isoscape #' computation and inference of spatial origins with mixed models using the R package IsoriX. In Hobson KA, Wassenaar LI (eds.), #' Tracking Animal Migration with Stable Isotopes, second edition. Academic Press, London. 
@@ -181,31 +181,29 @@ #' @source \url{https://kimura.univ-montp2.fr/~rousset/spaMM.htm} #' @keywords models regression #' @examples -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) -#' -#' if(getOption_IsoriX("example_maxtime") > 10) { -#' -#' ## Fitting the models for Germany -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' GermanFit <- isofit(data = GNIPDataDEagg, mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -#' -#' GermanFit -#' -#' ## Diagnostics for the fits -#' plot(GermanFit) -#' -#' ## Exploration of the fitted models -#' GermanFit$mean_fit -#' GermanFit$disp_fit -#' AIC(GermanFit$disp_fit) -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 10) { +#' ## Fitting the models for Germany +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' GermanFit <- isofit(data = GNIPDataDEagg, mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) +#' +#' GermanFit +#' +#' ## Diagnostics for the fits +#' plot(GermanFit) +#' +#' ## Exploration of the fitted models +#' GermanFit$mean_fit +#' GermanFit$disp_fit +#' AIC(GermanFit$disp_fit) #' } -#' +#' #' @export isofit <- function(data, mean_model_fix = list(elev = FALSE, lat_abs = FALSE, lat_2 = FALSE, long = FALSE, long_2 = FALSE), @@ -217,9 +215,7 @@ isofit <- function(data, dist_method = "Earth", ## or: "Euclidean" control_mean = list(), control_disp = list(), - verbose = interactive() - ) { - + verbose = interactive()) { ## Complete the arguments .complete_args(isofit) @@ -237,24 +233,24 @@ isofit <- function(data, } ## Partially check that the different arguments are compatible between each others - if (sum(unlist(mean_model_rand)) == 0 & (spaMM_method$mean_model == "corrHLfit" | spaMM_method$disp_model == "corrHLfit")) { + if (sum(unlist(mean_model_rand)) == 0 && 
(spaMM_method$mean_model == "corrHLfit" || spaMM_method$disp_model == "corrHLfit")) { stop("Your call does not make sense: the spaMM_method 'corrHLfit' should only be used when random effects are present.") } - if (mean_model_rand[[2]] & spaMM_method$mean_model == "HLfit") { + if (mean_model_rand[[2]] && spaMM_method$mean_model == "HLfit") { stop("Your call does not make sense: the spaMM_method 'HLfit' should only be used when no spatial random effects are present.") } - if (disp_model_rand[[2]] & spaMM_method$disp_model == "HLfit") { + if (disp_model_rand[[2]] && spaMM_method$disp_model == "HLfit") { stop("Your call does not make sense: the spaMM_method 'HLfit' should only be used when no spatial random effects are present.") } - if (!mean_model_rand[[2]] & spaMM_method$mean_model == "corrHLfit") { + if (!mean_model_rand[[2]] && spaMM_method$mean_model == "corrHLfit") { stop("Your call does not make sense: the spaMM_method 'corrHLfit' should only be used when a spatial random effects is present.") } - if (!disp_model_rand[[2]] & spaMM_method$disp_model == "corrHLfit") { + if (!disp_model_rand[[2]] && spaMM_method$disp_model == "corrHLfit") { stop("Your call does not make sense: the spaMM_method 'corrHLfit' should only be used when a spatial random effects is present.") } - if (!mean_model_rand$spatial & !disp_model_rand$spatial & - all(unlist(uncorr_terms) != c("lambda", "lambda")) - ) { + if (!mean_model_rand$spatial && !disp_model_rand$spatial && + all(unlist(uncorr_terms) != c("lambda", "lambda")) + ) { stop("In the absence of spatial random effects, only 'lambda' can be used as uncorr_terms.") } @@ -262,15 +258,15 @@ isofit <- function(data, ## Prepare the dataset data <- .prepare_data_sources(data) - ## Define the formulas for each model + ## Define the formulas for each model mean_formula <- .prepare_formula("mean_source_value ~ 1", - fix = mean_model_fix, rand = mean_model_rand, - rand_p = uncorr_terms$mean_model - ) + fix = mean_model_fix, rand = 
mean_model_rand, + rand_p = uncorr_terms$mean_model + ) disp_formula <- .prepare_formula("var_source_value ~ 1", - fix = disp_model_fix, rand = disp_model_rand, - rand_p = uncorr_terms$disp_model - ) + fix = disp_model_fix, rand = disp_model_rand, + rand_p = uncorr_terms$disp_model + ) ## Define weights data$weights_mean <- as.numeric(data$n_source_value) @@ -278,19 +274,21 @@ isofit <- function(data, if (all(data$weights_disp < 1)) { stop("The variable 'n_source_value' must have some observations > 1 to fit the residual dispersion model.") } - - ## Define the baseline argument lists for the models irrespective of the spaMM_method - args_dispfit <- list(formula = stats::formula(disp_formula), - family = stats::Gamma(log), - prior.weights = data$weights_disp, - data = data - ) - args_meanfit <- list(formula = stats::formula(mean_formula), - prior.weights = data$weights_mean, - resid.model = list(formula = ~ 0 + offset(pred_disp), family = stats::Gamma(identity)), - data = data - ) + ## Define the baseline argument lists for the models irrespective of the spaMM_method + args_dispfit <- list( + formula = stats::formula(disp_formula), + family = stats::Gamma(log), + prior.weights = data$weights_disp, + data = data + ) + + args_meanfit <- list( + formula = stats::formula(mean_formula), + prior.weights = data$weights_mean, + resid.model = list(formula = ~ 0 + offset(pred_disp), family = stats::Gamma(identity)), + data = data + ) ## Inclusion of additional arguments for corrHLfit, if necessary if (spaMM_method[1] == "corrHLfit") { @@ -335,13 +333,24 @@ isofit <- function(data, } ## Fit disp_fit - time_disp <- system.time(disp_fit <- do.call(eval(parse(text = paste0("spaMM::", spaMM_method$disp_model))), - c(args_dispfit, control_disp) - ) - ) + time_disp <- system.time(disp_fit <- do.call( + eval(parse(text = paste0("spaMM::", spaMM_method$disp_model))), + c(args_dispfit, control_disp) + )) ## Predict the values for the residual variance - args_meanfit$data$pred_disp <- 
spaMM::predict.HLfit(disp_fit, newdata = data)[, 1] + pred_disp_obj <- .safe_and_quiet_predictions(disp_fit, newdata = data) + args_meanfit$data$pred_disp <- pred_disp_obj$result[, 1] + + if (length(pred_disp_obj$messages) > 0) { + message("The following messages were produced by the predictions of residual dispersion: ") + message(pred_disp_obj$messages) + } + + if (length(pred_disp_obj$warnings) > 0) { + message("The following warnings were produced by the predictions of mean isotopic values: ") + message(pred_disp_obj$warnings) + } ## Interactive display if (verbose) { @@ -352,10 +361,10 @@ isofit <- function(data, } ## Fit mean_fit - time_mean <- system.time(mean_fit <- do.call(eval(parse(text = paste0("spaMM::", spaMM_method$mean_model))), - c(args_meanfit, control_mean) - ) - ) + time_mean <- system.time(mean_fit <- do.call( + eval(parse(text = paste0("spaMM::", spaMM_method$mean_model))), + c(args_meanfit, control_mean) + )) ## Interactive display of fit time duration total_time <- round(as.numeric((time_mean + time_disp)[3])) @@ -369,7 +378,7 @@ isofit <- function(data, ## Create the return object out <- list("mean_fit" = mean_fit, "disp_fit" = disp_fit, "info_fit" = info_fit) - + class(out) <- c("ISOFIT", "list") return(invisible(out)) @@ -404,24 +413,25 @@ isofit <- function(data, #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 30) { -#' -#' ## We prepare the GNIP monthly data between January and June for Germany -#' -#' GNIPDataDEmonthly <- prepsources(data = GNIPDataDE, -#' month = 1:6, -#' split_by = "month") -#' -#' head(GNIPDataDEmonthly) -#' -#' ## We fit the isoscapes -#' -#' GermanMonthlyFit <- isomultifit(data = GNIPDataDEmonthly) -#' -#' GermanMonthlyFit -#' -#' plot(GermanMonthlyFit) +#' +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## We prepare the GNIP monthly data between January and June for Germany +#' +#' GNIPDataDEmonthly <- prepsources( +#' data = GNIPDataDE, +#' month = 1:6, +#' split_by = "month" +#' ) +#' +#' head(GNIPDataDEmonthly) +#' +#' ## We fit the isoscapes +#' +#' GermanMonthlyFit <- isomultifit(data = GNIPDataDEmonthly) +#' +#' GermanMonthlyFit +#' +#' plot(GermanMonthlyFit) #' } #' @export isomultifit <- function(data, @@ -435,24 +445,22 @@ isomultifit <- function(data, dist_method = "Earth", ## or: "Euclidean" control_mean = list(), control_disp = list(), - verbose = interactive() -) { - + verbose = interactive()) { ## Complete the arguments .complete_args(isomultifit) - + ## Save the call information info_multifit <- info_fit <- mget(names(formals())) info_multifit$IsoriX_version <- utils::packageDescription("IsoriX")$Version info_multifit$verbose <- verbose - + if (is.null(data[, split_by])) { - stop(paste("You used 'split_by =", split_by, "' but no column called ',", split_by, "' is found in 'data'...")) + stop(paste("You used 'split_by =", split_by, "' but no column called ',", split_by, "' is found in 'data'...")) } - + ## Prepare arguments for call(s) to isofit - info_fit$split_by <- info_fit$weighting <- NULL ## removes arguments unknown to isofit - + info_fit$split_by <- info_fit$weighting <- NULL ## removes arguments unknown to isofit + ## Trivial case if no splitting is done if (is.null(split_by)) { return(do.call(isofit, info_fit)) @@ -465,7 +473,7 @@ isomultifit <- function(data, } ## 
Run all fits - info_fit$verbose <- FALSE ## no display for each fit + info_fit$verbose <- FALSE ## no display for each fit total_time <- system.time({ multi_fits <- lapply(unique(data[, split_by]), function(s) { info_fit$data <- data[data[, split_by] == s, ] @@ -474,24 +482,24 @@ isomultifit <- function(data, print(paste("fit of the pair of models for", split_by, s, "done"), quote = FALSE) } return(fit) - }) + }) }) names(multi_fits) <- paste(split_by, unique(data[, split_by]), sep = "_") - + ## Interactive display if (verbose) { print(paste("Done!"), quote = FALSE) print(paste0("All models have been fitted in ", round(as.numeric((total_time)[3])), "s."), quote = FALSE) } - + ## Store the time info_multifit$time_fit <- total_time - + ## Create the return object out <- list("multi_fits" = multi_fits, "info_fit" = info_multifit) - + class(out) <- c("MULTIISOFIT", "ISOFIT", "list") - + return(invisible(out)) } @@ -514,7 +522,7 @@ isomultifit <- function(data, if (!is.null(data$source_ID)) { data$source_ID <- factor(data$source_ID) } - + data$lat_abs <- abs(data$lat) data$lat_2 <- data$lat^2 data$long_2 <- data$long^2 @@ -522,7 +530,7 @@ isomultifit <- function(data, } -.prepare_formula <- function(base_formula, fix, rand, rand_p){ +.prepare_formula <- function(base_formula, fix, rand, rand_p) { ## This function should not be called by the user but is itself called by other functions. ## It prepares formulas for the fitting procedures. if (fix$elev) { @@ -540,7 +548,7 @@ isomultifit <- function(data, if (fix$long_2) { base_formula <- paste(base_formula, "+ long_2") } - if (rand$uncorr & rand_p == "lambda") { + if (rand$uncorr && rand_p == "lambda") { base_formula <- paste(base_formula, "+ (1|source_ID)") } if (rand$spatial) { @@ -574,12 +582,12 @@ summary.ISOFIT <- function(object, ...) 
{ cat(paste("[models fitted with spaMM version ", object$mean_fit$spaMM.version, "]", sep = ""), "\n") cat("\n") } else { - for (fit in 1:length(object$multi_fits)) { + for (fit in seq_along(object$multi_fits)) { cat("\n") cat(paste("##### Pair of models", names(object$multi_fits)[fit]), "#####") cat("\n") Recall(object$multi_fits[[fit]]) - } + } } return(invisible(NULL)) } diff --git a/IsoriX/R/isoscape.R b/IsoriX/R/isoscape.R index d51ed26..6c1a0a7 100644 --- a/IsoriX/R/isoscape.R +++ b/IsoriX/R/isoscape.R @@ -22,7 +22,7 @@ #' Let us summarize the meaning of `mean`, `mean_predVar`, #' `mean_residVar` and `mean_respVar` (see Courtiol & Rousset 2017 and #' Courtiol et al. 2019 for more details): -#' +#' #' Our model assumes that that there is a single true unknown isoscape, which is #' fixed but which is represented by the mixed-effect model as a random draw #' from possible realizations of isoscapes (random draws of the @@ -64,7 +64,7 @@ #' #' @references Courtiol, A., Rousset, F. (2017). Modelling isoscapes using mixed #' models. \url{https://www.biorxiv.org/content/10.1101/207662v1} -#' +#' #' Courtiol A, Rousset F, Rohwäder M, Soto DX, Lehnert L, Voigt CC, Hobson KA, Wassenaar LI, Kramer-Schadt S (2019). Isoscape #' computation and inference of spatial origins with mixed models using the R package IsoriX. In Hobson KA, Wassenaar LI (eds.), #' Tracking Animal Migration with Stable Isotopes, second edition. Academic Press, London. @@ -76,215 +76,264 @@ #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 30) { -#' -#' ## We prepare the data -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' ## We fit the models -#' GermanFit <- isofit(data = GNIPDataDEagg, -#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -#' -#' ## We build the isoscapes -#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -#' -#' GermanScape -#' plot(GermanScape) -#' -#' ## We build more plots -#' PlotMean <- plot(x = GermanScape, which = "mean", plot = FALSE) -#' -#' PlotMeanPredVar <- plot(x = GermanScape, which = "mean_predVar", plot = FALSE) -#' -#' PlotMeanResidVar <- plot(x = GermanScape, which = "mean_residVar", plot = FALSE) -#' -#' PlotMeanRespVar <- plot(x = GermanScape, which = "mean_respVar", plot = FALSE) -#' -#' ## We display the plots -#' print(PlotMean, split = c(1, 1, 2, 2), more = TRUE) -#' print(PlotMeanPredVar, split = c(2, 1, 2, 2), more = TRUE) -#' print(PlotMeanResidVar, split = c(1, 2, 2, 2), more = TRUE) -#' print(PlotMeanRespVar, split = c(2, 2, 2, 2), more = FALSE) -#' -#' ## We build a sphere with our isoscape -#' plot(x = GermanScape, which = "mean", plot = FALSE, sphere = list(build = TRUE)) -#' -#' ## We can save a rotating sphere with the isoscape as a .gif-file. -#' ## This file will be located inside your working directory. -#' ## Make sure your current rgl device (from the previous step) is still open -#' ## and that you have both the packages 'rgl' and 'magick' installed. -#' ## The building of the .gif implies to create temporarily many .png -#' ## but those will be removed automatically once the .gif is done. 
-#' ## Uncomment to proceed (after making sure you have rgl, magick & webshot2 installed) -#' #if(require("rgl") && require("magick") && require("webshot2")) { -#' # movie3d(spin3d(axis = c(0, 0, 1), rpm = 2), duration = 30, dir = getwd()) -#' #} -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## We prepare the data +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' ## We fit the models +#' GermanFit <- isofit( +#' data = GNIPDataDEagg, +#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE) +#' ) +#' +#' ## We build the isoscapes +#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) +#' +#' GermanScape +#' plot(GermanScape) +#' +#' ## We build more plots +#' PlotMean <- plot(x = GermanScape, which = "mean", plot = FALSE) +#' +#' PlotMeanPredVar <- plot(x = GermanScape, which = "mean_predVar", plot = FALSE) +#' +#' PlotMeanResidVar <- plot(x = GermanScape, which = "mean_residVar", plot = FALSE) +#' +#' PlotMeanRespVar <- plot(x = GermanScape, which = "mean_respVar", plot = FALSE) +#' +#' ## We display the plots +#' print(PlotMean, split = c(1, 1, 2, 2), more = TRUE) +#' print(PlotMeanPredVar, split = c(2, 1, 2, 2), more = TRUE) +#' print(PlotMeanResidVar, split = c(1, 2, 2, 2), more = TRUE) +#' print(PlotMeanRespVar, split = c(2, 2, 2, 2), more = FALSE) +#' +#' ## We build a sphere with our isoscape +#' plot(x = GermanScape, which = "mean", plot = FALSE, sphere = list(build = TRUE)) +#' +#' ## We can save a rotating sphere with the isoscape as a .gif-file. +#' ## This file will be located inside your working directory. +#' ## Make sure your current rgl device (from the previous step) is still open +#' ## and that you have both the packages 'rgl' and 'magick' installed. +#' ## The building of the .gif implies to create temporarily many .png +#' ## but those will be removed automatically once the .gif is done. 
+#' ## Uncomment to proceed (after making sure you have rgl, magick & webshot2 installed) +#' # if(require("rgl") && require("magick") && require("webshot2")) { +#' # movie3d(spin3d(axis = c(0, 0, 1), rpm = 2), duration = 30, dir = getwd()) +#' # } #' } -#' +#' #' @export isoscape <- function(raster, isofit, verbose = interactive()) { - if (inherits(isofit, "MULTIISOFIT")) { stop("Object 'isofit' of class MULTIISOFIT; use isomultiscape instead!") } - + if (verbose) { print("Building the isoscapes... ", quote = FALSE) print("(this may take a while)", quote = FALSE) } - + if (isofit$mean_fit$spaMM.version != utils::packageVersion(pkg = "spaMM")) { warning("The isofit has been fitted on a different version of spaMM than the one called by IsoriX. This may create troubles in paradize...") } - + time <- system.time({ - ## we extract lat/long from all cells of the raster - coord <- terra::crds(raster) - long_to_do <- coord[, "x"] # extract the longitude - lat_to_do <- coord[, "y"] # extract the lattitude - rm(coord); gc() ## remove coord as it can be a large object - + coord <- terra::crds(raster, na.rm = FALSE) + long_to_do <- coord[, "x"] # extract the longitude + lat_to_do <- coord[, "y"] # extract the lattitude + rm(coord) + gc() ## remove coord as it can be a large object + ## size of chunks to split the job into smaller ones chunk_size_for_predict <- 1000L - + ## indexes of beginning of each chunk (- 1) and of last position are being computed - steps <- unique(c(seq(from = 0L, to = length(long_to_do), by = chunk_size_for_predict), - length(long_to_do))) - + steps <- unique(c( + seq(from = 0L, to = length(long_to_do), by = chunk_size_for_predict), + length(long_to_do) + )) + ## a logical indicating if a progression bar must be used draw_pb <- interactive() & (length(steps) - 1) > 2 - + ## create empty vectors to store predictions mean_pred <- disp_pred <- rep(NA, length(long_to_do)) mean_predVar <- mean_residVar <- mean_respVar <- mean_pred disp_predVar <- 
disp_residVar <- disp_respVar <- disp_pred - + ## initiate the progress bar if (draw_pb) { pb <- utils::txtProgressBar(style = 3) } - + ## we build xs non-specifically using most complex model definition ## (it may look ugly but it should not increase much the computation time ## and it avoids a lot of uglier code) - xs <- data.frame(long = long_to_do, - long_2 = long_to_do^2, - lat = lat_to_do, - lat_abs = abs(lat_to_do), - lat_2 = lat_to_do^2, - elev = terra::extract(raster, cbind(long_to_do, lat_to_do))[[1]], ## ToDo: check that it is elev and not something else - source_ID = as.factor(paste("new", seq_len(length(long_to_do)), sep = "_"))) - - + xs <- data.frame( + long = long_to_do, + long_2 = long_to_do^2, + lat = lat_to_do, + lat_abs = abs(lat_to_do), + lat_2 = lat_to_do^2, + elev = terra::extract(raster, cbind(long_to_do, lat_to_do))[[1]], ## ToDo: check that it is elev and not something else + source_ID = as.factor(paste("new", seq_len(length(long_to_do)), sep = "_")) + ) + + ## we loop on each chunk + messages_meanpred <- character() + messages_disppred <- character() + warnings_meanpred <- character() + warnings_disppred <- character() + for (i in 1:(length(steps) - 1)) { - ## compute indexes for covariate values matching the current chunk - within_steps <- (steps[i] + 1L):steps[i + 1L] - + within_steps <- (steps[i] + 1L):steps[i + 1L] + ## we select the chunk of the design matrix required for the loop xs_small <- xs[within_steps, ] - + ## predictions from disp_fit - pred_dispfit <- .suppress_warning(spaMM::predict.HLfit(object = isofit$disp_fit, - newdata = xs_small, - variances = list(respVar = TRUE)), - warn = "Prior weights are not taken in account in residVar computation.") - + pred_dispfit <- .safe_and_quiet_predictions( + object = isofit$disp_fit, + newdata = xs_small, + variances = list(respVar = TRUE) + ) + + messages_disppred <- c(messages_disppred, pred_dispfit$messages) + warnings_disppred <- c(warnings_disppred, pred_dispfit$warnings) + 
pred_dispfit <- pred_dispfit$result + + index_disp <- as.integer(rownames(pred_dispfit)) ## need since predictions drop values if NA in predictors + ## transmission of phi to mean_fit - xs_small$pred_disp <- pred_dispfit[, 1] - + xs_small[rownames(pred_dispfit), "pred_disp"] <- pred_dispfit[, 1] + ## predictions from mean_fit - pred_meanfit <- .suppress_warning(spaMM::predict.HLfit(object = isofit$mean_fit, - newdata = xs_small, - variances = list(respVar = TRUE), - warn = "phi dispVar component not yet available for phi model != ~1.") + pred_meanfit <- .safe_and_quiet_predictions( + object = isofit$mean_fit, + newdata = xs_small, + variances = list(respVar = TRUE) ) - + + messages_meanpred <- c(messages_meanpred, pred_meanfit$messages) + warnings_meanpred <- c(warnings_meanpred, pred_meanfit$warnings) + pred_meanfit <- pred_meanfit$result + + index_mean <- as.integer(rownames(pred_meanfit)) ## need since predictions drop values if NA in predictors + ## we save the predictions - mean_pred[within_steps] <- pred_meanfit[, 1] - mean_predVar[within_steps] <- attr(pred_meanfit, "predVar") - mean_residVar[within_steps] <- attr(pred_meanfit, "residVar") ## same as disp_pred (as it should be) - mean_respVar[within_steps] <- attr(pred_meanfit, "respVar") - - disp_pred[within_steps] <- pred_dispfit[, 1] - disp_predVar[within_steps] <- attr(pred_dispfit, "predVar") ## same as mean_residVar (as it should be) - disp_residVar[within_steps] <- attr(pred_dispfit, "residVar") - disp_respVar[within_steps] <- attr(pred_dispfit, "respVar") - + mean_pred[index_mean] <- pred_meanfit[, 1] + mean_predVar[index_mean] <- attr(pred_meanfit, "predVar") + mean_residVar[index_mean] <- attr(pred_meanfit, "residVar") ## same as disp_pred (as it should be) + mean_respVar[index_mean] <- attr(pred_meanfit, "respVar") + + disp_pred[index_disp] <- pred_dispfit[, 1] + disp_predVar[index_disp] <- attr(pred_dispfit, "predVar") ## same as mean_residVar (as it should be) + disp_residVar[index_disp] <- 
attr(pred_dispfit, "residVar") + disp_respVar[index_disp] <- attr(pred_dispfit, "respVar") + if (draw_pb) { - utils::setTxtProgressBar(pb, steps[i + 1L]/length(lat_to_do)) ## update progress bar + utils::setTxtProgressBar(pb, steps[i + 1L] / length(lat_to_do)) ## update progress bar } - - } ## we leave the loop on chunks - + } ## we leave the loop on chunks + ## the progress bar is being closed if (draw_pb) close(pb) - }) ## end of system.time - + }) ## end of system.time + + messages_meanpred <- unique(messages_meanpred) + messages_disppred <- unique(messages_disppred) + warnings_meanpred <- unique(warnings_meanpred) + warnings_disppred <- unique(warnings_disppred) + + if (length(messages_meanpred) > 0) { + message("The following messages were produced by the predictions of mean isotopic values: ") + message(messages_meanpred) + } + + if (length(messages_disppred) > 0) { + message("The following messages were produced by the predictions of residual dispersion: ") + message(messages_disppred) + } + + if (length(warnings_meanpred) > 0) { + message("The following warnings were produced by the predictions of mean isotopic values: ") + message(warnings_meanpred) + } + + if (length(warnings_disppred) > 0) { + message("The following warnings were produced by the predictions of residual dispersion: ") + message(warnings_disppred) + } + ## display time time <- round(as.numeric((time)[3])) if (verbose) { - print(paste0("predictions for all ", length(long_to_do), - " locations have been computed in ", time, "s."), quote = FALSE) + print(paste0( + "predictions for all ", length(long_to_do), + " locations have been computed in ", time, "s." 
+ ), quote = FALSE) } - + ## we store the predictions for mean isotopic values into a raster save_raster <- function(x) { - .create_raster(long = long_to_do, - lat = lat_to_do, - values = x, - proj = "+proj=longlat +datum=WGS84" + .create_raster( + long = long_to_do, + lat = lat_to_do, + values = x, + proj = "+proj=longlat +datum=WGS84" ) } - + mean_raster <- save_raster(mean_pred) mean_predVar_raster <- save_raster(mean_predVar) mean_residVar_raster <- save_raster(mean_residVar) mean_respVar_raster <- save_raster(mean_respVar) - + disp_raster <- save_raster(disp_pred) disp_predVar_raster <- save_raster(disp_predVar) disp_residVar_raster <- save_raster(disp_residVar) disp_respVar_raster <- save_raster(disp_respVar) - - + + ## we create the spatial points for sources - source_points <- .create_spatial_points(long = isofit$mean_fit$data$long, - lat = isofit$mean_fit$data$lat, - proj = "+proj=longlat +datum=WGS84" + source_points <- .create_spatial_points( + long = isofit$mean_fit$data$long, + lat = isofit$mean_fit$data$lat, + proj = "+proj=longlat +datum=WGS84" ) - + ## we put all rasters in a brick - isoscapes <- terra::rast(list("mean" = mean_raster, - "mean_predVar" = mean_predVar_raster, - "mean_residVar" = mean_residVar_raster, - "mean_respVar" = mean_respVar_raster, - "disp" = disp_raster, - "disp_predVar" = disp_predVar_raster, - "disp_residVar" = disp_residVar_raster, - "disp_respVar" = disp_respVar_raster - ) - ) - + isoscapes <- terra::rast(list( + "mean" = mean_raster, + "mean_predVar" = mean_predVar_raster, + "mean_residVar" = mean_residVar_raster, + "mean_respVar" = mean_respVar_raster, + "disp" = disp_raster, + "disp_predVar" = disp_predVar_raster, + "disp_residVar" = disp_residVar_raster, + "disp_respVar" = disp_respVar_raster + )) + ## we put the brick in a list that also contains ## the spatial points for the sources - out <- list(isoscapes = isoscapes, - sp_points = list(sources = source_points) + out <- list( + isoscapes = isoscapes, + sp_points = 
list(sources = source_points) ) - + ## store design matrix as attribute attr(out, "xs") <- xs - + ## we define a new class class(out) <- c("ISOSCAPE", "list") - + return(out) } @@ -292,119 +341,127 @@ isoscape <- function(raster, isofit, verbose = interactive()) { # Predicts the spatial distribution of isotopic values - # + # # This function is an alternative implementation of isoscape(). # It is not exported but may be put in use in a future version of IsoriX. # It does not compute the predictions into chunks. - + if (inherits(isofit, "MULTIISOFIT")) { stop("object 'isofit' of class MULTIISOFIT; use isomultiscape instead.") } - + if (verbose) { print("Building the isoscapes... ", quote = FALSE) print("(this may take a while)", quote = FALSE) } - + if (isofit$mean_fit$spaMM.version != utils::packageVersion(pkg = "spaMM")) { warning("The isofit has been fitted on a different version of spaMM than the one called by IsoriX. This may create troubles in paradize...") } - + time <- system.time({ - ## we extract lat/long from all cells of the raster - coord <- terra::crds(raster) - + coord <- terra::crds(raster, na.rm = FALSE) + ## we create the object for newdata - xs <- data.frame(long = coord[, 1], - long_2 = coord[, 1]^2, - lat = coord[, 2], - lat_abs = abs(coord[, 2]), - lat_2 = coord[, 2]^2, - elev = terra::extract(raster, coord)[[1]], ## ToDo: check that it is elev - source_ID = as.factor(paste("new", 1:nrow(coord), sep = "_")) + xs <- data.frame( + long = coord[, 1], + long_2 = coord[, 1]^2, + lat = coord[, 2], + lat_abs = abs(coord[, 2]), + lat_2 = coord[, 2]^2, + elev = terra::extract(raster, coord)[[1]], ## ToDo: check that it is elev + source_ID = as.factor(paste("new", seq_len(nrow(coord)), sep = "_")) ) - - rm(coord); gc() ## remove coord as it can be a large object - - pred_dispfit <- spaMM::predict.HLfit(object = isofit$disp_fit, - newdata = xs, - variances = list(respVar = TRUE), - blockSize = 16000L + + rm(coord) + gc() ## remove coord as it can be a 
large object + + pred_dispfit <- .safe_and_quiet_predictions( + object = isofit$disp_fit, + newdata = xs, + variances = list(respVar = TRUE), + blockSize = 16000L ) - + ## transmission of phi to mean_fit xs$pred_disp <- pred_dispfit[, 1] - + ## predictions from mean_fit - pred_meanfit <- spaMM::predict.HLfit(object = isofit$mean_fit, - newdata = xs, - variances = list(respVar = TRUE), - blockSize = 16000L + pred_meanfit <- .safe_and_quiet_predictions( + object = isofit$mean_fit, + newdata = xs, + variances = list(respVar = TRUE), + blockSize = 16000L ) - + mean_pred <- pred_meanfit[, 1] - mean_predVar <- attr(pred_meanfit, "predVar") + mean_predVar <- attr(pred_meanfit, "predVar") mean_residVar <- attr(pred_meanfit, "residVar") ## same as disp_pred (as it should be) - mean_respVar <- attr(pred_meanfit, "respVar") - + mean_respVar <- attr(pred_meanfit, "respVar") + disp_pred <- pred_dispfit[, 1] - disp_predVar <- attr(pred_dispfit, "predVar") ## same as mean_residVar (as it should be) - disp_residVar <- attr(pred_dispfit, "residVar") - disp_respVar <- attr(pred_dispfit, "respVar") - }) ## end of system.time - + disp_predVar <- attr(pred_dispfit, "predVar") ## same as mean_residVar (as it should be) + disp_residVar <- attr(pred_dispfit, "residVar") + disp_respVar <- attr(pred_dispfit, "respVar") + }) ## end of system.time + ## display time time <- round(as.numeric((time)[3])) if (verbose) { - print(paste0("predictions for all ", nrow(xs), - " locations have been computed in ", time, "s."), quote = FALSE) + print(paste0( + "predictions for all ", nrow(xs), + " locations have been computed in ", time, "s." 
+ ), quote = FALSE) } - + ## we store the predictions for mean isotopic values into a raster - save_raster <- function(x){ - .create_raster(long = xs$long, - lat = xs$lat, - values = x, - proj = "+proj=longlat +datum=WGS84" + save_raster <- function(x) { + .create_raster( + long = xs$long, + lat = xs$lat, + values = x, + proj = "+proj=longlat +datum=WGS84" ) } - + mean_raster <- save_raster(mean_pred) mean_predVar_raster <- save_raster(mean_predVar) mean_residVar_raster <- save_raster(mean_residVar) mean_respVar_raster <- save_raster(mean_respVar) - + disp_raster <- save_raster(disp_pred) disp_predVar_raster <- save_raster(disp_predVar) disp_residVar_raster <- save_raster(disp_residVar) disp_respVar_raster <- save_raster(disp_respVar) - - + + ## we create the spatial points for sources - source_points <- .create_spatial_points(long = isofit$mean_fit$data$long, - lat = isofit$mean_fit$data$lat, - proj = "+proj=longlat +datum=WGS84" + source_points <- .create_spatial_points( + long = isofit$mean_fit$data$long, + lat = isofit$mean_fit$data$lat, + proj = "+proj=longlat +datum=WGS84" ) - + ## we put all rasters in a brick - isoscapes <- terra::rast(list("mean" = mean_raster, - "mean_predVar" = mean_predVar_raster, - "mean_residVar" = mean_residVar_raster, - "mean_respVar" = mean_respVar_raster, - "disp" = disp_raster, - "disp_predVar" = disp_predVar_raster, - "disp_residVar" = disp_residVar_raster, - "disp_respVar" = disp_respVar_raster - ) - ) - + isoscapes <- terra::rast(list( + "mean" = mean_raster, + "mean_predVar" = mean_predVar_raster, + "mean_residVar" = mean_residVar_raster, + "mean_respVar" = mean_respVar_raster, + "disp" = disp_raster, + "disp_predVar" = disp_predVar_raster, + "disp_residVar" = disp_residVar_raster, + "disp_respVar" = disp_respVar_raster + )) + ## we put the brick in a list that also contains ## the spatial points for the sources - out <- list(isoscapes = isoscapes, - sp_points = list(sources = source_points) + out <- list( + isoscapes = 
isoscapes, + sp_points = list(sources = source_points) ) - + ## we define a new class class(out) <- c("ISOCAPE", "list") return(out) @@ -414,94 +471,101 @@ isoscape <- function(raster, #' Predicts the average spatial distribution of isotopic values over months, #' years... -#' +#' #' This function is the counterpart of [isoscape] for the objects #' created with [isomultifit]. It creates the isoscapes for each #' strata (e.g. month) defined by `split_by` during the call to #' [isomultifit] and the aggregate them. The function can handle #' weighting for the aggregation process and can thus be used to predict annual #' averages precipitation weighted isoscapes. -#' +#' #' @inheritParams isoscape #' @param weighting An optional RasterBrick containing the weights #' @return This function returns a *list* of class *ISOSCAPE* #' containing a set of all 8 raster layers mentioned above (all being of #' class *SpatRaster*), and the location of the sources as spatial points. #' @seealso -#' +#' #' [isoscape] for details on the function used to compute the isoscapes for each strata #' [isomultifit] for the function fitting the isoscape -#' +#' #' [plot.ISOSCAPE] for the function plotting the isoscape model -#' +#' #' [IsoriX] for the complete work-flow -#' +#' #' @keywords models regression prediction predict #' @examples -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 180) { -#' -#' ## We prepare the data and split them by month: -#' -#' GNIPDataDEmonthly <- prepsources(data = GNIPDataDE, -#' split_by = "month") -#' -#' dim(GNIPDataDEmonthly) -#' -#' ## We fit the isoscapes:#' -#' GermanMultiFit <- isomultifit(data = GNIPDataDEmonthly, -#' mean_model_fix = list(elev = TRUE, lat.abs = TRUE)) -#' -#' ## We build the annual isoscapes by simple averaging (equal weighting): -#' GermanMultiscape <- isomultiscape(raster = ElevRasterDE, -#' isofit = GermanMultiFit) -#' -#' ## We build the annual isoscapes with a weighing based on precipitation amount: -#' GermanMultiscapeWeighted <- isomultiscape(raster = ElevRasterDE, -#' isofit = GermanMultiFit, -#' weighting = PrecipBrickDE) -#' -#' ## We plot the mean isoscape of the averaging with equal weighting: -#' plot(x = GermanMultiscape, which = "mean") -#' -#' ## We plot the mean isoscape of the averaging with precipitation weighting: -#' plot(x = GermanMultiscapeWeighted, which = "mean") -#' -#' ## We build the isoscapes for a given month (here January): -#' GermanScapeJan <- isoscape(raster = ElevRasterDE, -#' isofit = GermanMultiFit$multi_fits[["month_1"]]) -#' -#' ## We plot the mean isoscape for January: -#' plot(x = GermanScapeJan, which = "mean") -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 180) { +#' ## We prepare the data and split them by month: +#' +#' GNIPDataDEmonthly <- prepsources( +#' data = GNIPDataDE, +#' split_by = "month" +#' ) +#' +#' dim(GNIPDataDEmonthly) +#' +#' ## We fit the isoscapes:#' +#' GermanMultiFit <- isomultifit( +#' data = GNIPDataDEmonthly, +#' mean_model_fix = list(elev = TRUE, lat.abs = TRUE) +#' ) +#' +#' ## We build the annual isoscapes by simple averaging (equal weighting): +#' GermanMultiscape <- isomultiscape( +#' raster = ElevRasterDE, +#' isofit = GermanMultiFit +#' ) +#' +#' ## We build the annual isoscapes with a weighing based on precipitation amount: +#' GermanMultiscapeWeighted <- 
isomultiscape( +#' raster = ElevRasterDE, +#' isofit = GermanMultiFit, +#' weighting = PrecipBrickDE +#' ) +#' +#' ## We plot the mean isoscape of the averaging with equal weighting: +#' plot(x = GermanMultiscape, which = "mean") +#' +#' ## We plot the mean isoscape of the averaging with precipitation weighting: +#' plot(x = GermanMultiscapeWeighted, which = "mean") +#' +#' ## We build the isoscapes for a given month (here January): +#' GermanScapeJan <- isoscape( +#' raster = ElevRasterDE, +#' isofit = GermanMultiFit$multi_fits[["month_1"]] +#' ) +#' +#' ## We plot the mean isoscape for January: +#' plot(x = GermanScapeJan, which = "mean") #' } #' @export isomultiscape <- function(raster, ## change as method? isofit, weighting = NULL, - verbose = interactive() -) { - + verbose = interactive()) { ## In case the function is called on the output of isofit by mistake if (!inherits(isofit, "MULTIISOFIT")) { - return(isoscape(raster = raster, - isofit = isofit, - verbose = verbose - ) - ) + message("Your input for `isofit =` is not of class MULTIISOFIT, so your call to `isomultiscape()` has been automatically converted into a call to `isoscape()`.") + return(isoscape( + raster = raster, + isofit = isofit, + verbose = verbose + )) } - + ## Checking the inputs if (!is.null(weighting)) { - if (!inherits(weighting, c("RasterStack", "RasterBrick"))) { - stop("the argument 'weighting' should be a RasterStack or a RasterBrick") + if (!inherits(weighting, "SpatRaster")) { + stop("the argument 'weighting' should be a SpatRaster") } if (!all(names(isofit$multi.fits) %in% names(weighting))) { stop("the names of the layer in the object 'weighting' do not match those of your pairs of fits...") @@ -513,67 +577,73 @@ isomultiscape <- function(raster, ## change as method? 
stop("the resolution of the object 'weighting' and 'raster' differ") } } - - isoscapes <- lapply(names(isofit$multi_fits), - function(fit) { - if (verbose) { - print(paste("#### building isoscapes for", fit, " in progress ####"), quote = FALSE) - } - iso <- isoscape(raster = raster, - isofit = isofit$multi_fits[[fit]], - verbose = verbose - ) - iso$sp_points$sources$values <- fit ## set values for sp.points - return(iso) - } + + isoscapes <- lapply( + names(isofit$multi_fits), + function(fit) { + if (verbose) { + print(paste("#### building isoscapes for", fit, " in progress ####"), quote = FALSE) + } + iso <- isoscape( + raster = raster, + isofit = isofit$multi_fits[[fit]], + verbose = verbose + ) + iso$sp_points$sources$values <- fit ## set values for sp.points + return(iso) + } ) - + names(isoscapes) <- names(isofit$multi_fits) - + ## Combining mean isoscapes into RasterBricks brick_mean <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$mean)) brick_mean_predVar <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$mean_predVar)) brick_mean_residVar <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$mean_residVar)) brick_mean_respVar <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$mean_respVar)) - + ## Combining disp isoscapes into RasterBricks brick_disp <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$disp)) brick_disp_predVar <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$disp_predVar)) brick_disp_residVar <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$disp_residVar)) brick_disp_respVar <- terra::rast(lapply(isoscapes, function(iso) iso$isoscapes$disp_respVar)) - + ## Compute the weights if (is.null(weighting)) { weights <- terra::rast(raster) - weights <- terra::setValues(weights, 1/length(isoscapes)) + weights <- terra::setValues(weights, 1 / length(isoscapes)) } else { weights <- weighting / sum(weighting) } - + ## Compute the weighted averages and store then in a list of 
RasterBricks - multiscape <- terra::rast(list("mean" = sum(brick_mean * weights), - "mean_predVar" = sum(brick_mean_predVar * weights^2), - "mean_residVar" = sum(brick_mean_residVar * weights^2), - "mean_respVar" = sum(brick_mean_respVar * weights^2), - "disp" = sum(brick_disp * weights), - "disp_predVar" = sum(brick_disp_predVar * weights^2), - "disp_residVar" = sum(brick_disp_residVar * weights^2), - "disp_respVar" = sum(brick_disp_respVar * weights^2) - ) - ) - + multiscape <- terra::rast(list( + "mean" = sum(brick_mean * weights), + "mean_predVar" = sum(brick_mean_predVar * weights^2), + "mean_residVar" = sum(brick_mean_residVar * weights^2), + "mean_respVar" = sum(brick_mean_respVar * weights^2), + "disp" = sum(brick_disp * weights), + "disp_predVar" = sum(brick_disp_predVar * weights^2), + "disp_residVar" = sum(brick_disp_residVar * weights^2), + "disp_respVar" = sum(brick_disp_respVar * weights^2) + )) + ## Agglomerate the sources spatial points - source_points <- Reduce("+", lapply(isoscapes, function(iso) iso$sp_points$sources)) - + # source_points <- Reduce("+", lapply(isoscapes, function(iso) iso$sp_points$sources)) ## https://github.com/rspatial/terra/issues/1337 + source_points <- Reduce("rbind", lapply(isoscapes, function(iso) iso$sp_points$sources)) + terra::values(source_points) <- NULL + source_points <- terra::unique(source_points) + ## we put the brick in a list that also contains ## the spatial points for the sources - out <- list(isoscapes = multiscape, - sp_points = list(sources = source_points) + out <- list( + isoscapes = multiscape, + sp_points = list(sources = source_points) ) - + ## we define a new class class(out) <- c("ISOSCAPE", "list") - + return(out) } @@ -601,4 +671,3 @@ summary.ISOSCAPE <- function(object, ...) 
{ } return(invisible(NULL)) } - diff --git a/IsoriX/R/isosim.R b/IsoriX/R/isosim.R index b309974..1fcc718 100644 --- a/IsoriX/R/isosim.R +++ b/IsoriX/R/isosim.R @@ -1,8 +1,8 @@ #' Simulate isotopic values -#' +#' #' This function allows for the simulation of isoscapes. Both partial or #' complete (i.e. maps) isoscapes can be simulated. -#' +#' #' This function takes as inputs the values for all covariates matching a #' series of locations (which can be provided as an structural raster or as a #' *data.frame*), as well as the parameters of the isoscape model. The @@ -15,7 +15,7 @@ #' drawn independently, which is not the case in nature. Note that extra #' parameters present in the input lists will not make the function crash but #' won't be considered during computations either. -#' +#' #' @param data A *data.frame* containing the covariates needed for the #' simulation, or alternatively a structural raster of class *SpatRaster* #' @param mean_model_fix_coef A *vector* of coefficients for fixed-effects @@ -47,86 +47,91 @@ #' `RandomFields::RMnugget`, respectively. These two functions are #' part of the powerful package \pkg{RandomFields} (currently retired from CRAN). #' @seealso [isofit] for the function fitting the isoscape model -#' +#' #' [IsoriX] for the complete work-flow #' @keywords simulate simulation #' @examples -#' -#' +#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 60) { -#' -#' ## We simulate data under default settings -#' Simu <- isosim(data = ElevRasterDE, -#' save_dataframe = TRUE, -#' seed = 1) -#' -#' Simu -#' -#' ## We build the plots of the outcome using IsoriX -#' PlotMeanSimu <- plot(x = Simu, which = "mean") -#' -#' PlotDispSimu <- plot(x = Simu, which = "disp") -#' -#' -#' ## We fit the simulated data by sampling 50 locations -#' -#' set.seed(123) -#' Newdat <- Simu$data[sample(1:nrow(Simu$data), 50), ] -#' -#' NewdatFit <- isofit(data = Newdat, -#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -#' -#' Isoscape <- isoscape(ElevRasterDE, NewdatFit) -#' -#' PlotMeanFitted <- plot(x = Isoscape, which = "mean", sources = list(draw = FALSE)) -#' -#' PlotDispFitted <- plot(x = Isoscape, which = "disp", sources = list(draw = FALSE)) -#' -#' ## We compare simulated and fitted data visually -#' print(PlotMeanSimu, split = c(1, 1, 2, 2), more = TRUE) -#' print(PlotDispSimu, split = c(2, 1, 2, 2), more = TRUE) -#' print(PlotMeanFitted, split = c(1, 2, 2, 2), more = TRUE) -#' print(PlotDispFitted, split = c(2, 2, 2, 2), more = FALSE) -#' -#' ## We compare simulated and fitted data numerically -#' ## Note that differences are expected, as the geographic -#' ## area is small compared to the scale at which -#' ## spatial auto-correlation occurs -#' round(cbind( +#' +#' if (getOption_IsoriX("example_maxtime") > 60) { +#' ## We simulate data under default settings +#' Simu <- isosim( +#' data = ElevRasterDE, +#' save_dataframe = TRUE, +#' seed = 1 +#' ) +#' +#' Simu +#' +#' ## We build the plots of the outcome using IsoriX +#' PlotMeanSimu <- plot(x = Simu, which = "mean") +#' +#' PlotDispSimu <- plot(x = Simu, which = "disp") +#' +#' +#' ## We fit the simulated data by sampling 50 locations +#' +#' set.seed(123) +#' Newdat <- Simu$data[sample(1:nrow(Simu$data), 50), ] +#' +#' NewdatFit <- isofit( +#' data = Newdat, +#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE) +#' ) 
+#' +#' Isoscape <- isoscape(ElevRasterDE, NewdatFit) +#' +#' PlotMeanFitted <- plot(x = Isoscape, which = "mean", sources = list(draw = FALSE)) +#' +#' PlotDispFitted <- plot(x = Isoscape, which = "disp", sources = list(draw = FALSE)) +#' +#' ## We compare simulated and fitted data visually +#' print(PlotMeanSimu, split = c(1, 1, 2, 2), more = TRUE) +#' print(PlotDispSimu, split = c(2, 1, 2, 2), more = TRUE) +#' print(PlotMeanFitted, split = c(1, 2, 2, 2), more = TRUE) +#' print(PlotDispFitted, split = c(2, 2, 2, 2), more = FALSE) +#' +#' ## We compare simulated and fitted data numerically +#' ## Note that differences are expected, as the geographic +#' ## area is small compared to the scale at which +#' ## spatial auto-correlation occurs +#' round(cbind( #' simulated = c( -#' intercept = 64, -#' lat_abs = -2.3, -#' elev = -0.01, -#' nu = 0.35, -#' rho = 5e-5, -#' rho_div_nu = 5e-5/0.35, -#' lambda_ID = 0, -#' lambda_matern = 899, -#' intercept_disp = 5.8, -#' nu_disp = 3.2e-01, -#' rho_disp = 1.5e-05, -#' lambda_matern_source_ID = 0, -#' lambda_matern_disp = 5), +#' intercept = 64, +#' lat_abs = -2.3, +#' elev = -0.01, +#' nu = 0.35, +#' rho = 5e-5, +#' rho_div_nu = 5e-5 / 0.35, +#' lambda_ID = 0, +#' lambda_matern = 899, +#' intercept_disp = 5.8, +#' nu_disp = 3.2e-01, +#' rho_disp = 1.5e-05, +#' lambda_matern_source_ID = 0, +#' lambda_matern_disp = 5 +#' ), #' fitted = c( -#' intercept = NewdatFit$mean_fit$fixef[1], -#' lat.abs = NewdatFit$mean_fit$fixef[2], -#' elev = NewdatFit$mean_fit$fixef[3], -#' nu = get_ranPars(NewdatFit$mean_fit, which = "corrPars")[[1]]$nu, -#' rho = get_ranPars(NewdatFit$mean_fit, which = "corrPars")[[1]]$rho, -#' rho_div_nu = with(get_ranPars(NewdatFit$mean_fit, which = "corrPars")[[1]],rho/nu), -#' lambda.matern = NewdatFit$mean_fit$lambda, -#' intercept.disp = NewdatFit$disp_fit$fixef[1], -#' nu.disp = get_ranPars(NewdatFit$disp_fit, which = "corrPars")[[1]]$nu, -#' rho.disp = get_ranPars(NewdatFit$disp_fit, which = 
"corrPars")[[1]]$rho, -#' lambda.matern.disp = NewdatFit$disp_fit$lambda)), 4) -#' +#' intercept = NewdatFit$mean_fit$fixef[1], +#' lat.abs = NewdatFit$mean_fit$fixef[2], +#' elev = NewdatFit$mean_fit$fixef[3], +#' nu = get_ranPars(NewdatFit$mean_fit, which = "corrPars")[[1]]$nu, +#' rho = get_ranPars(NewdatFit$mean_fit, which = "corrPars")[[1]]$rho, +#' rho_div_nu = with(get_ranPars(NewdatFit$mean_fit, which = "corrPars")[[1]], rho / nu), +#' lambda.matern = NewdatFit$mean_fit$lambda, +#' intercept.disp = NewdatFit$disp_fit$fixef[1], +#' nu.disp = get_ranPars(NewdatFit$disp_fit, which = "corrPars")[[1]]$nu, +#' rho.disp = get_ranPars(NewdatFit$disp_fit, which = "corrPars")[[1]]$rho, +#' lambda.matern.disp = NewdatFit$disp_fit$lambda +#' ) +#' ), 4) #' } -#' +#' #' @export isosim <- function(data, mean_model_fix_coef = c(intercept = 64, elev = -0.01, lat_abs = -2.3, lat_2 = 0, long = 0, long_2 = 0), @@ -139,26 +144,28 @@ isosim <- function(data, seed = NULL, save_dataframe = FALSE, verbose = interactive()) { - if (!requireNamespace("RandomFields", quietly = TRUE)) { stop("The package 'RandomFields' is needed for this function, you could install it by typing install.packages('RandomFields'), but the package is unfortunately currently retired...") } - + ## if data is a raster, we convert it as data.frame if (inherits(data, "SpatRaster")) { raster <- data - coord <- terra::crds(raster) - data <- data.frame(long = coord[, "x"], - long_2 = coord[, "x"]^2, - lat = coord[, "y"], - lat_2 = coord[, "y"]^2, - lat_abs = abs(coord[, "y"]), - elev = terra::extract(raster, coord), - n_source_value = rep(1e6, nrow(coord)), - source_ID = as.factor(paste("simu", 1:nrow(coord), sep = "_"))) - rm(coord); gc() ## remove coord as it can be a large object + coord <- terra::crds(raster, na.rm = FALSE) + data <- data.frame( + long = coord[, "x"], + long_2 = coord[, "x"]^2, + lat = coord[, "y"], + lat_2 = coord[, "y"]^2, + lat_abs = abs(coord[, "y"]), + elev = terra::extract(raster, 
coord), + n_source_value = rep(1e6, nrow(coord)), + source_ID = as.factor(paste("simu", seq_len(nrow(coord)), sep = "_")) + ) + rm(coord) + gc() ## remove coord as it can be a large object } ## test if inputs are correct @@ -166,7 +173,7 @@ isosim <- function(data, stop("The argument you chose for 'dist_method' is unknown.") } - if (sum(mean_model_uncorr_coef > 0) > 1 | sum(disp_model_uncorr_coef > 0) > 1) { + if (sum(mean_model_uncorr_coef > 0) > 1 || sum(disp_model_uncorr_coef > 0) > 1) { stop("The arguments 'mean_model_uncorr_coef' and 'disp_model_uncorr_coef' must each have at least one coefficient equals to zero (considering two parameterizations of uncorrelated random effects does not make any sense.)") } @@ -187,13 +194,15 @@ isosim <- function(data, ## define the seeds (one for RandomFields, one for other R functions) and set other options for RandomFields set.seed(seed) - RandomFields::RFoptions(seed = ifelse(is.null(seed), NA, seed), - spConform = FALSE, ##so that RFsimulte returns vector directly - cPrintlevel = 1) ##cPrintlevel = 3 for more details + RandomFields::RFoptions( + seed = ifelse(is.null(seed), NA, seed), + spConform = FALSE, ## so that RFsimulte returns vector directly + cPrintlevel = 1 + ) ## cPrintlevel = 3 for more details if (dist_method == "Earth") { RandomFields::RFoptions(new_coord_sys = "earth") - } + } if (dist_method == "Euclidean") { RandomFields::RFoptions(new_coord_sys = "cartesian") } @@ -207,10 +216,12 @@ isosim <- function(data, } ## compute the linear predictor - LinpredDisp <- .linear_predictor(fix_coef = disp_model_fix_coef, - matern_coef = disp_model_matern_coef, - uncorr_coef = disp_model_uncorr_coef, - data = data) + LinpredDisp <- .linear_predictor( + fix_coef = disp_model_fix_coef, + matern_coef = disp_model_matern_coef, + uncorr_coef = disp_model_uncorr_coef, + data = data + ) data$disp_logvar_fix <- LinpredDisp$fix data$disp_logvar_matern <- LinpredDisp$matern @@ -219,8 +230,9 @@ isosim <- function(data, ## add 
residual variance data$var_source_value <- stats::rgamma(nrow(data), - shape = (data$disp_mean^2)/2, - scale = 2/data$disp_mean) + shape = (data$disp_mean^2) / 2, + scale = 2 / data$disp_mean + ) ### Simulate the mean if (verbose) { @@ -228,10 +240,12 @@ isosim <- function(data, } ## compute the linear predictor - MeanLinpred <- .linear_predictor(fix_coef = mean_model_fix_coef, - matern_coef = mean_model_matern_coef, - uncorr_coef = mean_model_uncorr_coef, - data = data) + MeanLinpred <- .linear_predictor( + fix_coef = mean_model_fix_coef, + matern_coef = mean_model_matern_coef, + uncorr_coef = mean_model_uncorr_coef, + data = data + ) data$mean_var_fix <- MeanLinpred$fix data$mean_var_matern <- MeanLinpred$matern @@ -244,12 +258,14 @@ isosim <- function(data, print("Building rasters...") } - save_raster <- function(x){ + save_raster <- function(x) { with(data, { - .create_raster(long = long, - lat = lat, - values = get(x), - proj = "+proj=longlat +datum=WGS84") + .create_raster( + long = long, + lat = lat, + values = get(x), + proj = "+proj=longlat +datum=WGS84" + ) }) } @@ -259,15 +275,17 @@ isosim <- function(data, ### Buidling return object out <- list() - out$isoscapes <- terra::rast(list("mean" = mean_raster, - "disp" = disp_raster)) + out$isoscapes <- terra::rast(list( + "mean" = mean_raster, + "disp" = disp_raster + )) - if (!save_dataframe & interactive()) { + if (!save_dataframe && interactive()) { message(paste("Note: simulated data not saved as data.frame (save_dataframe is set to FALSE). Saving the simulated data as data.frame would require", format(utils::object.size(data), units = "MB"))) } else { out$data <- data } - + class(out) <- c("ISOSCAPE", "ISOSIM", "list") return(out) @@ -277,31 +295,35 @@ isosim <- function(data, .linear_predictor <- function(fix_coef, matern_coef, uncorr_coef, data) { ## This function should not be called by the user but is itself called by other functions. 
## It builds the linear predictor for the simulations - + if (!requireNamespace("RandomFields", quietly = TRUE)) { stop("the package 'RandomFields' is needed for this function, you can install it by typing install.packages('RandomFields')") } - + ## fixed effects fix <- with(as.list(fix_coef), intercept + - elev*data$elev + lat_abs*data$lat_abs + lat_2*data$lat_2 + - long*data$long + long_2*data$long_2) + elev * data$elev + lat_abs * data$lat_abs + lat_2 * data$lat_2 + + long * data$long + long_2 * data$long_2) ## spatial random effects matern <- 0 if (matern_coef["lambda"] > 0) { - model_matern <- with(as.list(matern_coef), - RandomFields::RMwhittle(nu = nu, var = lambda, scale = 1/rho)) + model_matern <- with( + as.list(matern_coef), + RandomFields::RMwhittle(nu = nu, var = lambda, scale = 1 / rho) + ) matern <- RandomFields::RFsimulate(model_matern, - x = data$long, y = data$lat) + x = data$long, y = data$lat + ) } ## uncorr random effects uncorr <- stats::rnorm(nrow(data), mean = 0, sd = sqrt(uncorr_coef["lambda"])) if (uncorr_coef["nugget"] > 0) { uncorr <- uncorr + RandomFields::RFsimulate(RandomFields::RMnugget(var = uncorr_coef["nugget"]), - x = data$long, y = data$lat) + x = data$long, y = data$lat + ) } eta_sum <- fix + matern + uncorr return(list("fix" = fix, "matern" = matern, "uncorr" = uncorr, "eta_sum" = eta_sum)) diff --git a/IsoriX/R/options.R b/IsoriX/R/options.R index 0c871a7..dea2da6 100644 --- a/IsoriX/R/options.R +++ b/IsoriX/R/options.R @@ -1,4 +1,3 @@ - #' Setting and displaying the options of the package #' #' @name options @@ -8,6 +7,7 @@ #' \item{example_maxtime}{The number of seconds allowed for a given example to run. 
It is used to control whether the longer examples should be run or not based on the comparison between this option and the approximate running time of the example on our computers.} #' \item{Ncpu}{An *integer* corresponding to the number of cores to be used (in functions that can handle parallel processing)} #' \item{dont_ask}{A *logical* indicating if the user prompt during interactive session during plotting must be inactivated (for development purposes only)} +#' \item{spaMM_debugmod}{A *logical* indicating if the warnings and errors produced by the spaMM package should stopped being turned into messages (for development purposes only)} #' } #' #' @return The options are invisibly returned in an object called `IsoriX:::.data_IsoriX$options` @@ -21,19 +21,23 @@ #' options_IsoriX() #' options_IsoriX(example_maxtime = OldOptions$example_maxtime) #' options_IsoriX() - options_IsoriX <- function(...) { ## as in spaMM - if (nargs() == 0) return(.data_IsoriX$IsoriX_options) + if (nargs() == 0) { + return(.data_IsoriX$IsoriX_options) + } current <- .data_IsoriX$IsoriX_options temp <- list(...) 
if (length(temp) == 1 && is.null(names(temp))) { arg <- temp[[1]] switch(mode(arg), - list = temp <- arg, - character = return(.data_IsoriX$IsoriX_options[arg]), - stop("invalid argument: ", sQuote(arg))) + list = temp <- arg, + character = return(.data_IsoriX$IsoriX_options[arg]), + stop("invalid argument: ", sQuote(arg)) + ) + } + if (length(temp) == 0) { + return(current) } - if (length(temp) == 0) return(current) n <- names(temp) if (is.null(n)) stop("options must be given by name") current[n] <- temp @@ -54,10 +58,12 @@ getOption_IsoriX <- function(x = NULL) { stop("argument 'x' must be of length 1 maximum") } if (x == "Ncpu" && (options_IsoriX(x)[["Ncpu"]] > parallel::detectCores())) { - warning(paste(options_IsoriX(x)[["Ncpu"]], - "CPUs were requested, but the maximum number of CPU on this machine is", - parallel::detectCores(), - "so the Ncpu option of this package has been corrected")) + warning(paste( + options_IsoriX(x)[["Ncpu"]], + "CPUs were requested, but the maximum number of CPU on this machine is", + parallel::detectCores(), + "so the Ncpu option of this package has been corrected" + )) options_IsoriX(Ncpu = parallel::detectCores()) } if (x == "Ncpu") { @@ -69,11 +75,13 @@ getOption_IsoriX <- function(x = NULL) { if (x == "dont_ask") { return(options_IsoriX(x)[["dont_ask"]]) } + if (x == "spaMM_debug") { + return(options_IsoriX(x)[["spaMM_debug"]]) + } stop("option not found") } ## Setting default package options .data_IsoriX <- new.env(parent = emptyenv()) -.data_IsoriX$IsoriX_options <- list(example_maxtime = 5, Ncpu = 2L, dont_ask = FALSE) ## put example_maxtime = 500 to check all examples - ## otherwise put 5 - +.data_IsoriX$IsoriX_options <- list(example_maxtime = 5, Ncpu = 2L, dont_ask = FALSE, spaMM_debug = FALSE) ## put example_maxtime = 500 to check all examples +## otherwise put 5 diff --git a/IsoriX/R/plots.R b/IsoriX/R/plots.R index b77da1d..dc0c266 100644 --- a/IsoriX/R/plots.R +++ b/IsoriX/R/plots.R @@ -5,7 +5,7 @@ #' #' #' 
**General** -#' +#' #' When called upon an object of class *ISOFIT*, the plot function #' draws diagnostic information for the fits of the isoscape geostatistical #' model. @@ -22,7 +22,7 @@ #' #' #' **Plotting isoscapes** -#' +#' #' When used on a fitted isoscape, the user can choose between plotting the #' predictions (`which = "mean"`; default), the prediction variance (`which = #' "mean_predVar"`), the residual variance (`which = "mean_residVar"`), or the @@ -35,9 +35,9 @@ #' from CRAN), the user can choose between plotting the mean isotopic value #' (`which = "mean"`) or the residual dispersion (`which = "disp"`). #' -#' +#' #' **Plotting assignments** -#' +#' #' When called upon an object of class *ISOFIND*, the plot function draws a #' fine-tuned plot of the assignment. You can use the argument `who` to choose #' between plotting the assignment for the group or for some individuals (check @@ -45,7 +45,7 @@ #' #' #' **Info on parameters influencing the rendering of maps** -#' +#' #' The argument `y_title` is a list that can be tweaked to customise the title #' of isoscapes. Within this list, the element `which` is a logical indicating #' if the name of the layer should be displayed or not. The element `title` is a @@ -86,14 +86,14 @@ #' the function used is the palette [viridisLite::viridis]. #' #' **Default symbols used on maps** -#' -#' Under the default settings, we chose to +#' +#' Under the default settings, we chose to #' represent: #' - the source data by little red triangles. #' - the calibration data by little blue crosses. #' - the locations where the samples to assign were collected by white #' diamonds. -#' +#' #' These symbols can be changed as explained above. #' #' @name plots @@ -131,7 +131,7 @@ #' be returned as a rotating sphere and if the image created during the #' process should be saved in your current working directory. The default #' settings are FALSE and TRUE, respectively. 
-#' +#' #' @param xlab A *string* the x-axis label in plot.CALIBFIT #' @param ylab A *string* the y-axis label in plot.CALIBFIT #' @param xlim A range defining the extreme coordinates for the the x-axis in @@ -163,128 +163,136 @@ #' #' @keywords plot #' @examples ## See ?isoscape or ?isofind for examples -#' +#' NULL #' @rdname plots #' @method plot ISOSCAPE #' @exportS3Method plot ISOSCAPE plot.ISOSCAPE <- function(x, - which = "mean", - y_title = list(which = TRUE, title = bquote(delta**2*H)), ## bquote(italic("\u03B4")**2*H[p]) does not work on all system... + which = "mean", + y_title = list(which = TRUE, title = bquote(delta**2 * H)), ## bquote(italic("\u03B4")**2*H[p]) does not work on all system... sources = list(draw = TRUE, cex = 0.5, pch = 2, lwd = 1, col = "red"), borders = list(borders = NA, lwd = 0.5, col = "black"), - mask = list(mask = NA, lwd = 0, col = "black", fill = "black"), + mask = list(mask = NA, lwd = 0, col = "black", fill = "black"), palette = list(step = NA, range = c(NA, NA), n_labels = 11, digits = 2, fn = NA), - plot = TRUE, - sphere = list(build = FALSE, keep_image = TRUE), + plot = TRUE, + sphere = list(build = FALSE, keep_image = TRUE), ... ## we cannot remove the dots because of the S3 export... 
- ) { - if (!inherits(x, "ISOSCAPE")) { - stop("This function must be called on an object of class ISOSCAPE.") - } - - simu <- "ISOSIM" %in% class(x) +) { + if (!inherits(x, "ISOSCAPE")) { + stop("This function must be called on an object of class ISOSCAPE.") + } - ## complete input with default setting - .complete_args(plot.ISOSCAPE) + simu <- "ISOSIM" %in% class(x) - ## importing palette if missing - if (!is.null(palette$fn) && !is.function(palette$fn) && is.na(palette$fn)) { - isopalette1 <- NULL ## to please R CMD check - utils::data("isopalette1", envir = environment(), package = "IsoriX") - palette$fn <- grDevices::colorRampPalette(isopalette1, bias = 1) - } + ## complete input with default setting + .complete_args(plot.ISOSCAPE) - ## importing country borders if missing - if (!is.null(borders$borders) && is.na(borders$borders)) { - borders$borders <- terra::readRDS(system.file("extdata/CountryBorders.rds", package = "IsoriX")) - } + ## importing palette if missing + if (!is.null(palette$fn) && !is.function(palette$fn) && is.na(palette$fn)) { + isopalette1 <- NULL ## to please R CMD check + utils::data("isopalette1", envir = environment(), package = "IsoriX") + palette$fn <- grDevices::colorRampPalette(isopalette1, bias = 1) + } - ## importing ocean if missing - if (!is.null(mask$mask) && !inherits(mask$mask, "SpatVector") && is.na(mask$mask)) { - mask$mask <- terra::readRDS(system.file("extdata/OceanMask.rds", package = "IsoriX")) - } + ## importing country borders if missing + if (!is.null(borders$borders) && is.na(borders$borders)) { + borders$borders <- terra::readRDS(system.file("extdata/CountryBorders.rds", package = "IsoriX")) + } - if (simu) { - if (sources$draw) { - sources$draw <- FALSE - message("You have asked to plot sources, but it does not make sense for simulations as each raster cell is a source. 
The argument 'plot.sources' was thus considered to be FALSE.") - } - if (!(which %in% c("mean", "disp"))) { - stop("For simulated data, the argument 'which' must be 'mean' or 'disp'.") - } - } else { - if (!(which %in% c("mean", "mean_predVar", "mean_residVar", "mean_respVar", - "disp", "disp_predVar", "disp_residVar", "disp_respVar"))) { + ## importing ocean if missing + if (!is.null(mask$mask) && !inherits(mask$mask, "SpatVector") && is.na(mask$mask)) { + mask$mask <- terra::readRDS(system.file("extdata/OceanMask.rds", package = "IsoriX")) + } + + if (simu) { + if (sources$draw) { + sources$draw <- FALSE + message("You have asked to plot sources, but it does not make sense for simulations as each raster cell is a source. The argument 'plot.sources' was thus considered to be FALSE.") + } + if (!(which %in% c("mean", "disp"))) { + stop("For simulated data, the argument 'which' must be 'mean' or 'disp'.") + } + } else { + if (!(which %in% c( + "mean", "mean_predVar", "mean_residVar", "mean_respVar", + "disp", "disp_predVar", "disp_residVar", "disp_respVar" + ))) { stop("argument 'which' unknown") - } } + } + + ## compute the colors + colours <- .cut_and_color( + var = x$isoscape[[which]], # @data@values, + step = palette$step, + range = palette$range, + palette = palette$fn, + n_labels = palette$n_labels, + digits = palette$digits + ) + + ## define y title + Title <- "" + if (simu) Title <- "simulated" + if (y_title$which) Title <- paste(Title, sub("_", " ", which, fixed = TRUE)) + if (!is.null(y_title$title)) Title <- bquote(.(Title) ~ .(y_title$title)) + + ## create the levelplot + ## note the use of bquote() which contrary to expression(paste()) + ## allows for the evaluation of arguments. 
+ ## (the stars are used to remove spaces) + + map <- rasterVis::levelplot(x$isoscape[[which]], + maxpixels = prod(dim(x$isoscape[[which]])[1:2]), + margin = FALSE, + col.regions = colours$all_cols, + at = colours$at, + colorkey = list(labels = list(at = colours$at_keys, labels = colours$at_labels)), + main = Title + ) + + ## create the additional plot(s) + decor <- .build_additional_layers( + x = x, + sources = sources, + calibs = NULL, + borders = borders, + mask = mask + ) + - ## compute the colors - colours <- .cut_and_color(var = x$isoscape[[which]], #@data@values, - step = palette$step, - range = palette$range, - palette = palette$fn, - n_labels = palette$n_labels, - digits = palette$digits) - - ## define y title - Title <- "" - if (simu) Title <- "simulated" - if (y_title$which) Title <- paste(Title, sub("_", " ", which, fixed = TRUE)) - if (!is.null(y_title$title)) Title <- bquote(.(Title)~.(y_title$title)) - - ## create the levelplot - ## note the use of bquote() which contrary to expression(paste()) - ## allows for the evaluation of arguments. - ## (the stars are used to remove spaces) - - map <- rasterVis::levelplot(x$isoscape[[which]], - maxpixels = prod(dim(x$isoscape[[which]])[1:2]), - margin = FALSE, - col.regions = colours$all_cols, - at = colours$at, - colorkey = list(labels = list(at = colours$at_keys, labels = colours$at_labels)), - main = Title) - - ## create the additional plot(s) - decor <- .build_additional_layers(x = x, - sources = sources, - calibs = NULL, - borders = borders, - mask = mask) - - - complete_map <- map + decor$borders_layer + decor$mask_layer + decor$sources_layer - - ## plotting - if (plot & !sphere$build) { - ## check if prompt must appear in examples - if (.data_IsoriX$IsoriX_options$dont_ask) { - options(example_ask = "FALSE") ## only effective for the next example run, not the current one... 
- } - ## send plot to graphic device - print(complete_map) + complete_map <- map + decor$borders_layer + decor$mask_layer + decor$sources_layer + + ## plotting + if (plot && !sphere$build) { + ## check if prompt must appear in examples + if (.data_IsoriX$IsoriX_options$dont_ask) { + options(example_ask = "FALSE") ## only effective for the next example run, not the current one... } + ## send plot to graphic device + print(complete_map) + } + + ## tweak to please codetools::checkUsagePackage('IsoriX', skipWith = TRUE) + rm(Title) - ## tweak to please codetools::checkUsagePackage('IsoriX', skipWith = TRUE) - rm(Title) - - ## build the 3D-Sphere - if (sphere$build) { - .build_sphere(x$isoscape[[which]], colours = colours, decor = decor) - if (!sphere$keep_image) { - message(paste("IsoriX no longer delete the image used to build the sphere since it prevents rgl to work on some system. \n", - "You can always delete manually the file created which is stored here: \n", - normalizePath(file.path("IsoriX_world_image.png")))) - #file.remove("IsoriX_world_image.png") - } - message("If you do not see the sphere, run rgl::rglwidget()") + ## build the 3D-Sphere + if (sphere$build) { + .build_sphere(x$isoscape[[which]], colours = colours, decor = decor) + if (!sphere$keep_image) { + message(paste( + "IsoriX no longer delete the image used to build the sphere since it prevents rgl to work on some system. 
\n", + "You can always delete manually the file created which is stored here: \n", + normalizePath(file.path("IsoriX_world_image.png")) + )) + # file.remove("IsoriX_world_image.png") } - - return(invisible(complete_map)) + message("If you do not see the sphere, run rgl::rglwidget()") + } + return(invisible(complete_map)) } .build_sphere <- function(x, colours, decor) { @@ -293,7 +301,7 @@ plot.ISOSCAPE <- function(x, stop("The package 'rgl' is needed for this function, you can install it by typing install.packages('rgl').") } - + if (interactive()) { print("Building the sphere...", quote = FALSE) print("(this may take a few seconds)", quote = FALSE) @@ -304,43 +312,48 @@ plot.ISOSCAPE <- function(x, terra::ext(x) <- terra::ext(world_raster) } p <- rasterVis::levelplot(x, - col.regions = colours$all_cols, - at = colours$at, - colorkey = FALSE, - maxpixel = prod(dim(x)[1:2])) + decor$borders_layer + + col.regions = colours$all_cols, + at = colours$at, + colorkey = FALSE, + maxpixel = prod(dim(x)[1:2]) + ) + decor$borders_layer + decor$mask_layer + decor$mask2_layer + decor$sources_layer + decor$calibs_layer - grDevices::png(filename = "IsoriX_world_image.png", width = 2*dim(x)[2], height = 2*dim(x)[1]) + grDevices::png(filename = "IsoriX_world_image.png", width = 2 * dim(x)[2], height = 2 * dim(x)[1]) print(p) pargs <- lattice::trellis.panelArgs(p, 1) lims <- do.call(lattice::prepanel.default.levelplot, pargs) - + grid::grid.newpage() - grid::pushViewport(grid::viewport(xscale = grDevices::extendrange(lims$xlim, f = 0.07), - yscale = grDevices::extendrange(lims$ylim, f = 0.07))) + grid::pushViewport(grid::viewport( + xscale = grDevices::extendrange(lims$xlim, f = 0.07), + yscale = grDevices::extendrange(lims$ylim, f = 0.07) + )) do.call(p$panel, pargs) grDevices::dev.off() - + while (length(rgl::rgl.dev.list()) > 0) rgl::close3d() ## close all open rgl devices makerglsphere <- function(x, y = NULL, z = NULL, ng = 50, radius = 1, color = "white", add = FALSE, ...) 
{ ## code inspired from https://stackoverflow.com/questions/30627647/how-to-plot-a-perfectly-round-sphere-in-r-rgl-spheres - lat <- matrix(seq(90, -90, len = ng)*pi/180, ng, ng, byrow = TRUE) - long <- matrix(seq(-180, 180, len = ng)*pi/180, ng, ng) + lat <- matrix(seq(90, -90, len = ng) * pi / 180, ng, ng, byrow = TRUE) + long <- matrix(seq(-180, 180, len = ng) * pi / 180, ng, ng) xyz <- grDevices::xyz.coords(x, y, z, recycle = TRUE) vertex <- matrix(rbind(xyz$x, xyz$y, xyz$z), nrow = 3, dimnames = list(c("x", "y", "z"), NULL)) nvertex <- ncol(vertex) - radius <- rbind(vertex, rep(radius, length.out = nvertex))[4,] - color <- rbind(vertex, rep(color, length.out = nvertex))[4,] - + radius <- rbind(vertex, rep(radius, length.out = nvertex))[4, ] + color <- rbind(vertex, rep(color, length.out = nvertex))[4, ] + for (i in 1:nvertex) { - add2 <- if (!add) i > 1 else T - x <- vertex[1,i] + radius[i]*cos(lat)*cos(long) - y <- vertex[2,i] + radius[i]*cos(lat)*sin(long) - z <- vertex[3,i] + radius[i]*sin(lat) - rgl::persp3d(x, y, z, add = add2, color = color[i], axes = FALSE, - xlab = "", ylab = "", zlab = "", ...) + add2 <- if (!add) i > 1 else TRUE + x <- vertex[1, i] + radius[i] * cos(lat) * cos(long) + y <- vertex[2, i] + radius[i] * cos(lat) * sin(long) + z <- vertex[3, i] + radius[i] * sin(lat) + rgl::persp3d(x, y, z, + add = add2, color = color[i], axes = FALSE, + xlab = "", ylab = "", zlab = "", ... 
+ ) } } - + rgl::par3d("windowRect" = c(0, 0, 500, 500)) rgl::bg3d(sphere = TRUE, color = "darkgrey", lit = FALSE) makerglsphere(0, texture = "IsoriX_world_image.png", lit = FALSE, color = "white") ## alternative to rgl::spheres3d() @@ -350,26 +363,25 @@ plot.ISOSCAPE <- function(x, #' @method plot ISOFIND #' @exportS3Method plot ISOFIND plot.ISOFIND <- function(x, - who = "group", - cutoff = list(draw = TRUE, level = 0.05, col = "#909090"), - sources = list(draw = TRUE, cex = 0.5, pch = 2, lwd = 1, col = "red"), - calibs = list(draw = TRUE, cex = 0.5, pch = 4, lwd = 1, col = "blue"), - assigns = list(draw = TRUE, cex = 0.5, pch = 5, lwd = 1, col = "white"), - borders = list(borders = NA, lwd = 0.5, col = "black"), - mask = list(mask = NA, lwd = 0, col = "black", fill = "black"), - mask2 = list(mask = NA, lwd = 0, col = "purple", fill = "purple"), - palette = list(step = NA, range = c(0, 1), n_labels = 11, digits = 2, fn = NA), - plot = TRUE, - sphere = list(build = FALSE, keep_image = TRUE), - ... ## we cannot remove the dots because of the S3 export... - ) { - + who = "group", + cutoff = list(draw = TRUE, level = 0.05, col = "#909090"), + sources = list(draw = TRUE, cex = 0.5, pch = 2, lwd = 1, col = "red"), + calibs = list(draw = TRUE, cex = 0.5, pch = 4, lwd = 1, col = "blue"), + assigns = list(draw = TRUE, cex = 0.5, pch = 5, lwd = 1, col = "white"), + borders = list(borders = NA, lwd = 0.5, col = "black"), + mask = list(mask = NA, lwd = 0, col = "black", fill = "black"), + mask2 = list(mask = NA, lwd = 0, col = "purple", fill = "purple"), + palette = list(step = NA, range = c(0, 1), n_labels = 11, digits = 2, fn = NA), + plot = TRUE, + sphere = list(build = FALSE, keep_image = TRUE), + ... ## we cannot remove the dots because of the S3 export... 
+) { what <- "pv" ## ToDo: implement other possibilities - + if (!inherits(x, "ISOFIND")) { stop("This function must be called on an object of class ISOFIND") } - + ## complete input with default setting .complete_args(plot.ISOFIND) @@ -384,7 +396,7 @@ plot.ISOFIND <- function(x, if (!is.null(borders$borders) && is.na(borders$borders)) { borders$borders <- terra::readRDS(system.file("extdata/CountryBorders.rds", package = "IsoriX")) } - + ## importing ocean if missing if (!is.null(mask$mask) && !inherits(mask$mask, "SpatVector") && is.na(mask$mask)) { mask$mask <- terra::readRDS(system.file("extdata/OceanMask.rds", package = "IsoriX")) @@ -396,10 +408,10 @@ plot.ISOFIND <- function(x, } ## changing cutoff level to null when we don't want to draw the cutoff - if (what != "pv" | !cutoff$draw) { + if (what != "pv" || !cutoff$draw) { cutoff$level <- 0 } - + ## changing who when sample is size one if ("group" %in% who && length(names(x$sample[[what]])) == 1) { who <- names(x$sample[[what]]) @@ -407,23 +419,30 @@ plot.ISOFIND <- function(x, ## create the main plot(s) if ("group" %in% who) { - colours <- .cut_and_color(var = x$group$pv, #@data@values, removed - step = palette$step, - range = palette$range, - palette = palette$fn, - cutoff = cutoff$level, - col_cutoff = cutoff$col, - n_labels = palette$n_labels, - digits = palette$digits) - - map <- rasterVis::levelplot(x$group$pv, # x$group$pv * (x$group$pv > cutoff$level) - maxpixels = prod(dim(x$group$pv)[1:2]), - margin = FALSE, - col.regions = colours$all_cols, - at = colours$at, - colorkey = list(labels = list(at = colours$at_keys, labels = colours$at_labels)), - main = "Group assignment" - ) + colours <- .cut_and_color( + var = x$group$pv, # @data@values, removed + step = palette$step, + range = palette$range, + palette = palette$fn, + cutoff = cutoff$level, + col_cutoff = cutoff$col, + n_labels = palette$n_labels, + digits = palette$digits + ) + + group_noNAs <- terra::classify(x$group$pv, cbind(NA, NA, 0)) + if 
(!identical(terra::values(group_noNAs), terra::values(x$group$pv))) { + warning("The assignment test occurred at location(s) with unknown isoscape value(s); p-values set to 0 for such (a) location(s).") + } + + map <- rasterVis::levelplot(group_noNAs, # x$group$pv * (x$group$pv > cutoff$level) + maxpixels = prod(dim(x$group$pv)[1:2]), + margin = FALSE, + col.regions = colours$all_cols, + at = colours$at, + colorkey = list(labels = list(at = colours$at_keys, labels = colours$at_labels)), + main = "Group assignment" + ) } else { main_title <- if (length(who) == 1) { names(x$sample[[what]][[who]]) @@ -431,38 +450,42 @@ plot.ISOFIND <- function(x, NULL } - colours <- .cut_and_color(var = x$sample[[what]][[who]], #@data@values removed: does not work if raster on hard drive... - step = palette$step, - range = palette$range, - palette = palette$fn, - cutoff = cutoff$level, - col_cutoff = cutoff$col, - n_labels = palette$n_labels, - digits = palette$digits) + colours <- .cut_and_color( + var = x$sample[[what]][[who]], # @data@values removed: does not work if raster on hard drive... 
+ step = palette$step, + range = palette$range, + palette = palette$fn, + cutoff = cutoff$level, + col_cutoff = cutoff$col, + n_labels = palette$n_labels, + digits = palette$digits + ) - stack_noNAs <- terra::classify(x$sample[[what]][[who]], cbind(NA, NA, 0)) + stack_noNAs <- terra::classify(x$sample[[what]][[who]], cbind(NA, NA, 0)) if (!identical(terra::values(stack_noNAs), terra::values(x$sample[[what]][[who]]))) { - warning("The p-values for an assignment samples containing only missing values are considered as 0.") + warning("The assignment test occurred at location(s) with unknown isoscape value(s); p-values set to 0 for such (a) location(s).") } - - map <- rasterVis::levelplot(stack_noNAs, #x$sample[[what]][[who]] * (x$sample$pv[[who]] > cutoff$level) - maxpixels = prod(dim(stack_noNAs)[1:2]), - margin = FALSE, - col.regions = colours$all_cols, - at = colours$at, - colorkey = list(labels = list(at = colours$at_keys, labels = colours$at_labels)), - main = main_title) + + map <- rasterVis::levelplot(stack_noNAs, # x$sample[[what]][[who]] * (x$sample$pv[[who]] > cutoff$level) + maxpixels = prod(dim(stack_noNAs)[1:2]), + margin = FALSE, + col.regions = colours$all_cols, + at = colours$at, + colorkey = list(labels = list(at = colours$at_keys, labels = colours$at_labels)), + main = main_title + ) } ## create the additional plot(s) - decor <- .build_additional_layers(x = x, - sources = sources, - calibs = calibs, - assigns = assigns, - borders = borders, - mask = mask, - mask2 = mask2 - ) + decor <- .build_additional_layers( + x = x, + sources = sources, + calibs = calibs, + assigns = assigns, + borders = borders, + mask = mask, + mask2 = mask2 + ) ## changing the colour below the threshold if (cutoff$level > 0) { @@ -472,21 +495,23 @@ plot.ISOFIND <- function(x, ## we add the legend for the side bar ## (thanks to Deepayan Sarkar, for solving a device opening hicup at this stage) - map$legend$right <- list(fun = latticeExtra::mergedTrellisLegendGrob, - args = 
list(map$legend$right, - list(fun = grid::textGrob, - args = list(label = "P-value", rot = 90) - ), - vertical = FALSE - ) - ) + map$legend$right <- list( + fun = latticeExtra::mergedTrellisLegendGrob, + args = list(map$legend$right, + list( + fun = grid::textGrob, + args = list(label = "P-value", rot = 90) + ), + vertical = FALSE + ) + ) ## piling all layers together complete_map <- map + decor$borders_layer + decor$mask_layer + decor$mask2_layer + decor$sources_layer + decor$calibs_layer + decor$assigns_layer ## plotting - if (plot & !sphere$build) { + if (plot && !sphere$build) { ## check if prompt must appear in examples if (.data_IsoriX$IsoriX_options$dont_ask) { options(example_ask = "FALSE") ## only effective for the next example run, not the current one... @@ -504,27 +529,27 @@ If you want to build several spheres, build them one by one and do request a sin } .build_sphere(stack_noNAs[[1]], colours = colours, decor = decor) if (!sphere$keep_image) { - message(paste("IsoriX no longer delete the image used to build the sphere since it prevents rgl to work on some system. \n", - "You can always delete manually the file created which is stored here: \n", - normalizePath(file.path("IsoriX_world_image.png")))) - #file.remove("IsoriX_world_image.png") + message(paste( + "IsoriX no longer delete the image used to build the sphere since it prevents rgl to work on some system. 
\n", + "You can always delete manually the file created which is stored here: \n", + normalizePath(file.path("IsoriX_world_image.png")) + )) + # file.remove("IsoriX_world_image.png") } } return(invisible(complete_map)) - } .cut_and_color <- function(var, - step = NA, - range = NA, - palette = NULL, - cutoff = NA, - col_cutoff = "#909090", - n_labels = 99, - digits = 2) { - + step = NA, + range = NA, + palette = NULL, + cutoff = NA, + col_cutoff = "#909090", + n_labels = 99, + digits = 2) { var <- .summarize_values(var) ## converting rasters info into numerics if needed var <- var[!is.na(var)] max_var <- max(var) @@ -550,20 +575,22 @@ If you want to build several spheres, build them one by one and do request a sin step <- (max(range, na.rm = TRUE) - min(range, na.rm = TRUE)) / (n_labels - 1) } where_cut <- seq(min(range, na.rm = TRUE), (max(range, na.rm = TRUE)), step) - if ((max(where_cut) < max_var) & !hard.top) { + if ((max(where_cut) < max_var) && !hard.top) { where_cut <- c(where_cut, max(where_cut) + step) n_labels <- n_labels + 1 } if ((min_var < min(where_cut)) || (max_var > max(where_cut))) { - warning(paste0("Range for palette too small! It should be at least: [", - min_var, "-", max_var, "]")) + warning(paste0( + "Range for palette too small! 
It should be at least: [", + min_var, "-", max_var, "]" + )) } if (!is.na(cutoff)) { where_cut <- sort(unique(c(cutoff, where_cut))) } - if (length(unique(var)) > 1) { - cats <- cut(var, where_cut, ordered_result = TRUE) ## also works on non raster - } else {## case if no variation + if (length(unique(var)) > 1) { + cats <- cut(var, where_cut, ordered_result = TRUE) ## also works on non raster + } else { ## case if no variation cats <- as.factor(where_cut) where_cut <- c(where_cut, where_cut + 1e-10) warning("There is no variation in the map!") @@ -586,7 +613,7 @@ If you want to build several spheres, build them one by one and do request a sin if (sum(at_keys %% 1) == 0) { digits <- 0 } - at_labels <- formatC(round(at_keys, digits = digits), digits = digits, format = "f") + at_labels <- formatC(round(at_keys, digits = digits), digits = digits, format = "f") return(list(cols = cols, at = where_cut, all_cols = all_cols, at_keys = at_keys, at_labels = at_labels)) } @@ -594,11 +621,10 @@ If you want to build several spheres, build them one by one and do request a sin #' @method plot ISOFIT #' @exportS3Method plot ISOFIT plot.ISOFIT <- function(x, cex_scale = 0.2, ...) { - if (!inherits(x, "ISOFIT")) { stop("This function must be called on an object of class ISOFIT.") } - + ## Test if RStudio is in use RStudio <- .Platform$GUI == "RStudio" @@ -613,11 +639,11 @@ plot.ISOFIT <- function(x, cex_scale = 0.2, ...) { ## Define mfrow (number of rows and column in panel) mfrow <- switch(as.character(nplot), - "1" = c(1, 1), - "2" = c(1, 2), - "3" = c(1, 3), - "4" = c(2, 2), - stop("The nplot value was not anticipated.") + "1" = c(1, 1), + "2" = c(1, 2), + "3" = c(1, 3), + "4" = c(2, 2), + stop("The nplot value was not anticipated.") ) ## Setup the graphic device @@ -625,17 +651,17 @@ plot.ISOFIT <- function(x, cex_scale = 0.2, ...) { ## Plots from spaMM spaMM::plot.HLfit(x$mean_fit, - "predict", - cex = 0.1 + cex_scale*log(x$mean_fit$data$weights_mean), - las = 1, ... 
+ "predict", + cex = 0.1 + cex_scale * log(x$mean_fit$data$weights_mean), + las = 1, ... ) graphics::title(main = "Pred vs Obs in mean_fit") .hit_return() spaMM::plot.HLfit(x$disp_fit, - "predict", - cex = 0.1 + cex_scale*log(x$disp_fit$data$weights_disp), - las = 1, ... + "predict", + cex = 0.1 + cex_scale * log(x$disp_fit$data$weights_disp), + las = 1, ... ) graphics::title(main = "Pred vs Obs in disp_fit") @@ -655,7 +681,7 @@ plot.ISOFIT <- function(x, cex_scale = 0.2, ...) { ## Reset the graphic device graphics::par(mfrow = c(1, 1)) } else { - for (fit in 1:length(x$multi_fits)) { + for (fit in seq_along(x$multi_fits)) { cat("\n") cat(paste("##### Plots for pair of models", names(x$multi_fits)[fit]), "#####") cat("\n") @@ -671,41 +697,40 @@ plot.ISOFIT <- function(x, cex_scale = 0.2, ...) { ## This function should not be called by the user. ## It plots the Matern autocorrelation. rho <- spaMM::get_ranPars(model, which = "corrPars")[[1]]$rho - nu <- spaMM::get_ranPars(model, which = "corrPars")[[1]]$nu + nu <- spaMM::get_ranPars(model, which = "corrPars")[[1]]$nu d_stop <- FALSE d <- 0 - - while ((d < 50000) & !d_stop) { + + while ((d < 50000) && !d_stop) { d <- d + 10 m <- spaMM::MaternCorr(d = d, rho = rho, nu = nu) if (m < limit) d_stop <- TRUE } - + distances <- seq(0, d, 1) - + if (length(distances) < 30) { - d_stop <- FALSE d <- 0 - - while ((d < 30) & !d_stop) { + + while ((d < 30) && !d_stop) { d <- d + 1 m <- spaMM::MaternCorr(d = d, rho = rho, nu = nu) if (m < limit) d_stop <- TRUE } - + distances <- seq(0, d, 0.1) } - + m <- spaMM::MaternCorr(d = distances, rho = rho, nu = nu) graphics::plot(m ~ distances, - type = "l", - las = 1, - xlab = "Distances (km)", - ylab = "Correlation", - ... + type = "l", + las = 1, + xlab = "Distances (km)", + ylab = "Correlation", + ... ) } @@ -725,7 +750,6 @@ plot.CALIBFIT <- function(x, CI = list(show = TRUE, col = "blue"), plot = TRUE, ...) 
{ - .complete_args(plot.CALIBFIT) plotting_calibfit(x = x, pch = pch, col = col, line = line, CI = CI, xlab = xlab, ylab = ylab, xlim = xlim, ylim = ylim, points = FALSE, plot = plot, ...) @@ -743,43 +767,45 @@ points.CALIBFIT <- function(x, plot = TRUE, ...) { .complete_args(points.CALIBFIT) - + plotting_calibfit(x = x, pch = pch, col = col, line = line, CI = CI, xlab = NULL, ylab = NULL, points = TRUE, plot = plot, ...) } plotting_calibfit <- function(x, pch, col, line, CI, xlab, ylab, xlim = NULL, ylim = NULL, points = FALSE, plot = TRUE, ...) { - if (!inherits(x, "CALIBFIT")) { stop("This function must be called on an object of class CALIBFIT.") } - + if (x$method == "desk" && !points && (is.null(xlim) || is.null(ylim))) { stop("Since no calibration points have been loaded, xlim & ylim must be defined to indicate the range of the x- and y-axes. Call again the plotting function again after adding e.g. xlim = c(-100, 0), ylim = c(-45, -15) in the function call.") } - + if (x$method == "desk" && points && is.null(xlim) && is.null(ylim)) { xlim <- ylim <- c(-1e6, 1e6) } - + x_var <- switch(x$method, - wild = x$data$mean_source_value, - lab = x$data$source_value, - desk = xlim) - + wild = x$data$mean_source_value, + lab = x$data$source_value, + desk = xlim + ) + y_var <- switch(x$method, - wild = x$data$sample_value, - lab = x$data$sample_value, - desk = ylim) + wild = x$data$sample_value, + lab = x$data$sample_value, + desk = ylim + ) ## prepare design matrix - xs <- with(x$data, - seq(min(x_var), - max(x_var), - length = 100 - ) + xs <- with( + x$data, + seq(min(x_var), + max(x_var), + length = 100 + ) ) X <- cbind(1, xs) - + ## compute fitted values (in all case so as to return them even if not displayed) fitted <- X %*% x$param @@ -787,15 +813,15 @@ plotting_calibfit <- function(x, pch, col, line, CI, xlab, ylab, xlim = NULL, yl fixedVar <- rowSums(X * (X %*% x$fixefCov)) ## = diag(X %*% x$fixefCov %*% t(X)) if (any(fixedVar < 0)) { fixedVar[fixedVar < 0] <- 0 - 
warning("Some negative estimates of variances are considered null. Negative estimates of variances are a sign that numerical problems occured during the fitting of the calibration.") + warning("Some negative estimates of variances are considered null. Negative estimates of variances are a sign that numerical problems occurred during the fitting of the calibration.") } - lwr <- fitted + stats::qnorm(0.025)*sqrt(fixedVar) - upr <- fitted + stats::qnorm(0.975)*sqrt(fixedVar) + lwr <- fitted + stats::qnorm(0.025) * sqrt(fixedVar) + upr <- fitted + stats::qnorm(0.975) * sqrt(fixedVar) if (is.null(xlim)) { xlim <- range(x_var, na.rm = TRUE) } - + if (is.null(ylim)) { if (CI$show) { ylim <- range(lwr, y_var, upr, na.rm = TRUE) @@ -803,50 +829,52 @@ plotting_calibfit <- function(x, pch, col, line, CI, xlab, ylab, xlim = NULL, yl ylim <- range(y_var, na.rm = TRUE) } } - + ## remove fake points used for the construction of the plot when using calibration method "desk" if (x$method == "desk") { col <- NULL } - + if (!points && plot) { - with(x$data, - graphics::plot.default(y_var ~ x_var, - xlab = xlab, - ylab = ylab, - xlim = xlim, - ylim = ylim, - las = 1, - pch = pch, - col = col, - ... - ) - ) + with( + x$data, + graphics::plot.default(y_var ~ x_var, + xlab = xlab, + ylab = ylab, + xlim = xlim, + ylim = ylim, + las = 1, + pch = pch, + col = col, + ... + ) + ) } else if (plot) { - with(x$data, - graphics::points.default(y_var ~ x_var, pch = pch, col = col, ...) + with( + x$data, + graphics::points.default(y_var ~ x_var, pch = pch, col = col, ...) 
) } - + ## plot regression line if (line$show && plot) { graphics::points(fitted ~ xs, col = line$col, lwd = 2, type = "l") } - + ## plot CI if (CI$show && plot) { graphics::points(lwr ~ xs, col = CI$col, lty = 2, type = "l") graphics::points(upr ~ xs, col = CI$col, lty = 2, type = "l") } - + ## return for plots outside IsoriX out <- data.frame(source_value = xs, sample_fitted = fitted, sample_lwr = lwr, sample_upr = upr) - + ## tweak to please codetools::checkUsagePackage('IsoriX', skipWith = TRUE) rm(fitted, fixedVar) - + return(invisible(out)) - } +} #' @rdname plots @@ -861,20 +889,25 @@ plot.SpatRaster <- function(x, ...) { .build_additional_layers <- function(x, sources, calibs, assigns = NULL, borders, mask, mask2 = NULL) { ## This function should not be called by the user. ## It builds the additional layers for the plots. - + ## layer for sources if (!sources$draw) { sources_layer <- latticeExtra::layer() } else { - sources_layer <- latticeExtra::layer(lattice::lpoints(sources, - col = pt$col, - cex = pt$cex, - pch = pt$pch, - lwd = pt$lwd), - data = list(sources = x$sp_points$sources, - pt = sources)) - } - + sources_layer <- latticeExtra::layer( + lattice::lpoints(sources, + col = pt$col, + cex = pt$cex, + pch = pt$pch, + lwd = pt$lwd + ), + data = list( + sources = x$sp_points$sources, + pt = sources + ) + ) + } + ## layer for calibration points if (is.null(calibs)) { calibs_layer <- latticeExtra::layer() @@ -882,16 +915,21 @@ plot.SpatRaster <- function(x, ...) 
{ if (!calibs$draw) { calibs_layer <- latticeExtra::layer() } else { - calibs_layer <- latticeExtra::layer(lattice::lpoints(calibs, - col = pt$col, - cex = pt$cex, - pch = pt$pch, - lwd = pt$lwd), - data = list(calibs = x$sp_points$calibs, - pt = calibs)) + calibs_layer <- latticeExtra::layer( + lattice::lpoints(calibs, + col = pt$col, + cex = pt$cex, + pch = pt$pch, + lwd = pt$lwd + ), + data = list( + calibs = x$sp_points$calibs, + pt = calibs + ) + ) } } - + ## layer for assignment points if (is.null(assigns)) { assigns_layer <- latticeExtra::layer() @@ -899,55 +937,70 @@ plot.SpatRaster <- function(x, ...) { if (!assigns$draw) { assigns_layer <- latticeExtra::layer() } else { - assigns_layer <- latticeExtra::layer(lattice::lpoints(assigns, - col = pt$col, - cex = pt$cex, - pch = pt$pch, - lwd = pt$lwd), - data = list(assigns = x$sp_points$assigns, - pt = assigns)) + assigns_layer <- latticeExtra::layer( + lattice::lpoints(assigns, + col = pt$col, + cex = pt$cex, + pch = pt$pch, + lwd = pt$lwd + ), + data = list( + assigns = x$sp_points$assigns, + pt = assigns + ) + ) } } - + ## layer for country borders if (is.null(borders$borders)) { borders_layer <- latticeExtra::layer() - } else { - borders_layer <- latticeExtra::layer(lattice::lpolygon(b$borders, - lwd = b$lwd, - border = b$col), - data = list(b = borders)) + } else { + borders_layer <- latticeExtra::layer( + lattice::lpolygon(b$borders, + lwd = b$lwd, + border = b$col + ), + data = list(b = borders) + ) } - + ## layer for mask if (is.null(mask$mask)) { mask_layer <- latticeExtra::layer() } else { - mask_layer <- latticeExtra::layer(lattice::lpolygon(m$mask, - col = m$fill, - border = m$col, - lwd = m$lwd), - data = list(m = mask)) + mask_layer <- latticeExtra::layer( + lattice::lpolygon(m$mask, + col = m$fill, + border = m$col, + lwd = m$lwd + ), + data = list(m = mask) + ) } - + if (is.null(mask2$mask)) { mask2_layer <- latticeExtra::layer() } else { - mask2_layer <- 
latticeExtra::layer(lattice::lpolygon(m$mask, - col = m$fill, - border = m$col, - lwd = m$lwd), - data = list(m = mask2)) - } - - layers <- list(sources_layer = sources_layer, - calibs_layer = calibs_layer, - borders_layer = borders_layer, - assigns_layer = assigns_layer, - mask_layer = mask_layer, - mask2_layer = mask2_layer + mask2_layer <- latticeExtra::layer( + lattice::lpolygon(m$mask, + col = m$fill, + border = m$col, + lwd = m$lwd + ), + data = list(m = mask2) + ) + } + + layers <- list( + sources_layer = sources_layer, + calibs_layer = calibs_layer, + borders_layer = borders_layer, + assigns_layer = assigns_layer, + mask_layer = mask_layer, + mask2_layer = mask2_layer ) - + ## tweak to please R CMD check b <- m <- pt <- NULL return(layers) diff --git a/IsoriX/R/prepcipitate.R b/IsoriX/R/prepcipitate.R index b771820..287fe1f 100644 --- a/IsoriX/R/prepcipitate.R +++ b/IsoriX/R/prepcipitate.R @@ -1,65 +1,63 @@ #' Prepare the raster brick containing the precipitation data -#' +#' #' This functions turns the WorldClim data downloaded using the function #' [getprecip] into a *RasterBrick* of same resolution and #' extent as the structural raster. This function is designed to be used with #' [isomultiscape]. -#' +#' #' @param path A *string* indicating the path where the WorldClim data have #' been downloaded. If the path is null (the default) the function will assume -#' that the folder containing the precipitation data is in the current +#' that the folder containing the precipitation data is in the current #' directory #' @param raster A *raster* containing the structural raster -#' @param verbose A *logical* indicating whether information about the -#' progress of the procedure should be displayed or not while the function is -#' running. By default verbose is `TRUE` if users use an interactive R +#' @param verbose A *logical* indicating whether information about the +#' progress of the procedure should be displayed or not while the function is +#' running. 
By default verbose is `TRUE` if users use an interactive R #' session, and `FALSE` otherwise. -#' +#' #' @seealso -#' +#' #' [getprecip] to download the relevant precipitation data -#' +#' #' [PrecipBrickDE] for the stored precipitation data for Germany -#' +#' #' [prepelev] to prepare an elevation raster -#' +#' #' @examples -#' +#' #' ## The following example takes some time and download a large amount of data (~ 1 Gb). #' ## It will therefore not be run unless you uncomment it -#' +#' #' ### We fit the models for Germany: -#' #GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' # GNIPDataDEagg <- prepsources(data = GNIPDataDE) #' # -#' #GermanFit <- isofit(data = GNIPDataDEagg, +#' # GermanFit <- isofit(data = GNIPDataDEagg, #' # mean_model_fix = list(elev = TRUE, lat.abs = TRUE)) #' # #' ### We prepare the structural raster: -#' #StrRaster <- prepraster(raster = ElevRasterDE, -#' # isofit = GermanFit, -#' # aggregation_factor = 0) +#' # StrRaster <- prepraster(raster = ElevRasterDE, +#' # isofit = GermanFit, +#' # aggregation_factor = 0) #' # #' ### We download the precipitation data: -#' #getprecip(path = "~/Downloads/") +#' # getprecip(path = "~/Downloads/") #' # #' ### We prepare the raster brick with all the precipitation data: -#' #PrecipitationBrick <- prepcipitate(path = "~/Downloads/", -#' # raster = StrRaster) +#' # PrecipitationBrick <- prepcipitate(path = "~/Downloads/", +#' # raster = StrRaster) #' # #' ### We plot the precipitation data: -#' #levelplot(PrecipitationBrick) +#' # levelplot(PrecipitationBrick) #' #' @export prepcipitate <- function(path = NULL, raster, - verbose = interactive() - ) { - + verbose = interactive()) { ## Prepare path if (!is.null(path)) { path <- normalizePath(path, mustWork = FALSE) } - + if (is.null(path)) { path <- paste0(getwd(), "/wc2.1_30s_prec") } else { @@ -67,25 +65,25 @@ prepcipitate <- function(path = NULL, path <- paste0(path, "/wc2.1_30s_prec") } } - + ## List the tif files list_tif <- list.files(path = path, 
pattern = "\\.tif$") - + ## Checks if the tif files are there if (length(list_tif) == 0) { stop("There is no *.tif file in path... you may have the path wrong or you may not have downloaded the file using 'getprecip()'.") } - + ## Checks if the tif files are the good ones if (!all(paste0("wc2.1_30s_prec_", formatC(1:12, digits = 0, width = 2, format = "f", flag = 0), ".tif") %in% list_tif)) { stop("The '.tif' files do not have expected names: 'wc2.1_30s_prec_01.tif', 'wc2.1_30s_prec_02.tif', ...") } - + ## Small function to get the name of a given file getfilename <- function(month) { paste0(path, "/wc2.1_30s_prec_", formatC(month, digits = 0, width = 2, format = "f", flag = 0), ".tif") } - + ## Import and resize rasters one by one for (month in 1:12) { if (verbose) { @@ -94,15 +92,16 @@ prepcipitate <- function(path = NULL, tmp.raster <- terra::rast(getfilename(month)) ## crop before resampling to save a lot of time tmp.raster <- .crop_withmargin(tmp.raster, - xmin = terra::xmin(raster), - xmax = terra::xmax(raster), - ymin = terra::ymin(raster), - ymax = terra::ymax(raster), - margin_pct = 10) # 10% hardcoded, probably fine for most case + xmin = terra::xmin(raster), + xmax = terra::xmax(raster), + ymin = terra::ymin(raster), + ymax = terra::ymax(raster), + margin_pct = 10 + ) # 10% hardcoded, probably fine for most case assign(paste0("month_", month), terra::resample(x = tmp.raster, y = raster)) rm(tmp.raster) } - + ## Put all rasters in a RasterBrick precip <- terra::rast(mget(paste0("month_", 1:12))) return(precip) diff --git a/IsoriX/R/prepraster.R b/IsoriX/R/prepraster.R index 6d8f1ff..e639a12 100644 --- a/IsoriX/R/prepraster.R +++ b/IsoriX/R/prepraster.R @@ -30,7 +30,7 @@ #' limitations as plotting polygons or points on top of that remains problematic #' (see example bellow). We will work on this on the future but we have other #' priorities for now (let us know if you really need this feature). 
-#' +#' #' @inheritParams getelev #' @param raster The structural raster (*SpatRaster*) #' @param isofit The fitted isoscape model returned by the function [isofit] @@ -55,88 +55,90 @@ #' predictions. #' @seealso [ElevRasterDE] for information on elevation rasters, which can be #' used as structural rasters. -#' +#' #' @keywords utilities #' @examples -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) -#' -#' if(getOption_IsoriX("example_maxtime") > 30) { -#' -#' ## We fit the models for Germany -#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' -#' GermanFit <- isofit(data = GNIPDataDEagg, -#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -#' -#' ### Let's explore the difference between aggregation schemes -#' -#' ## We aggregate and crop using different settings -#' ElevationRaster1 <- prepraster( +#' +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## We fit the models for Germany +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' GermanFit <- isofit( +#' data = GNIPDataDEagg, +#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE) +#' ) +#' +#' ### Let's explore the difference between aggregation schemes +#' +#' ## We aggregate and crop using different settings +#' ElevationRaster1 <- prepraster( #' raster = ElevRasterDE, #' isofit = GermanFit, #' margin_pct = 0, -#' aggregation_factor = 0) -#' -#' ElevationRaster2 <- prepraster( +#' aggregation_factor = 0 +#' ) +#' +#' ElevationRaster2 <- prepraster( #' raster = ElevRasterDE, #' isofit = GermanFit, #' margin_pct = 5, -#' aggregation_factor = 5) -#' -#' ElevationRaster3 <- prepraster( +#' aggregation_factor = 5 +#' ) +#' +#' ElevationRaster3 <- prepraster( #' raster = ElevRasterDE, #' isofit = GermanFit, #' margin_pct = 10, -#' aggregation_factor = 5, 
aggregation_fn = max) -#' -#' ## We plot the outcome of the 3 different aggregation schemes using terra -#' -#' oripar <- par(mfrow = c(1, 3)) ## display 3 plots side-by-side -#' -#' plot(ElevationRaster1, main = "Original small raster") -#' polys(CountryBorders) -#' polys(OceanMask, col = "blue") -#' -#' plot(ElevationRaster2, main = "Small raster aggregated (by mean)") -#' polys(CountryBorders) -#' polys(OceanMask, col = "blue") -#' -#' plot(ElevationRaster3, main = "Small raster aggregated (by max)") -#' polys(CountryBorders) -#' polys(OceanMask, col = "blue") -#' -#' par(oripar) ## restore graphical settings +#' aggregation_factor = 5, aggregation_fn = max +#' ) +#' +#' ## We plot the outcome of the 3 different aggregation schemes using terra +#' +#' oripar <- par(mfrow = c(1, 3)) ## display 3 plots side-by-side +#' +#' plot(ElevationRaster1, main = "Original small raster") +#' polys(CountryBorders) +#' polys(OceanMask, col = "blue") +#' +#' plot(ElevationRaster2, main = "Small raster aggregated (by mean)") +#' polys(CountryBorders) +#' polys(OceanMask, col = "blue") +#' +#' plot(ElevationRaster3, main = "Small raster aggregated (by max)") +#' polys(CountryBorders) +#' polys(OceanMask, col = "blue") +#' +#' par(oripar) ## restore graphical settings #' } -#' +#' #' ## The examples below will only be run if sufficient time is allowed #' ## You can change that by typing e.g. options_IsoriX(example_maxtime = XX) #' ## if you want to allow for examples taking up to ca. XX seconds to run #' ## (so don't write XX but put a number instead!) 
-#' -#' if(getOption_IsoriX("example_maxtime") > 10) { -#' -#' ### Let's create a raster centered around the pacific -#' -#' ## We first create an empty raster -#' EmptyRaster <- rast(matrix(0, ncol = 360, nrow = 180)) -#' ext(EmptyRaster) <- c(-180, 180, -90, 90) -#' crs(EmptyRaster) <- "+proj=longlat +datum=WGS84" -#' -#' ## We crop it around the pacific -#' PacificA <- prepraster(EmptyRaster, manual_crop = c(110, -70, -90, 90)) -#' ext(PacificA) # note that the extent has changed! -#' -#' ## We plot (note the use of the function shift()!) -#' plot(PacificA, col = "blue", legend = FALSE) -#' polys(CountryBorders, col = "black") -#' polys(shift(CountryBorders, dx = 360), col = "black") -#' +#' +#' if (getOption_IsoriX("example_maxtime") > 10) { +#' ### Let's create a raster centered around the pacific +#' +#' ## We first create an empty raster +#' EmptyRaster <- rast(matrix(0, ncol = 360, nrow = 180)) +#' ext(EmptyRaster) <- c(-180, 180, -90, 90) +#' crs(EmptyRaster) <- "+proj=longlat +datum=WGS84" +#' +#' ## We crop it around the pacific +#' PacificA <- prepraster(EmptyRaster, manual_crop = c(110, -70, -90, 90)) +#' ext(PacificA) # note that the extent has changed! +#' +#' ## We plot (note the use of the function shift()!) +#' plot(PacificA, col = "blue", legend = FALSE) +#' polys(CountryBorders, col = "black") +#' polys(shift(CountryBorders, dx = 360), col = "black") #' } -#' +#' #' @export prepraster <- function(raster, isofit = NULL, @@ -145,21 +147,19 @@ prepraster <- function(raster, aggregation_fn = mean, manual_crop = NULL, values_to_zero = c(-Inf, 0), - verbose = interactive() -) { - + verbose = interactive()) { time <- system.time({ - if (!is.null(isofit)) { ## test if cropping is needed + if (!is.null(isofit)) { ## test if cropping is needed if (inherits(isofit, "multiisofit")) { isofit <- isofit$multi.fits[[1]] } if (!is.null(manual_crop)) stop("cannot crop both according to sources and manually! 
Make up your choice.") - if (## test if the raster is not smaller than the area covered by the sources. + if ( ## test if the raster is not smaller than the area covered by the sources. ## If yes crop will not proceed! terra::xmin(raster) > min(isofit$mean_fit$data$long) | - terra::xmax(raster) < max(isofit$mean_fit$data$long) | - terra::ymin(raster) > min(isofit$mean_fit$data$lat) | - terra::ymax(raster) < max(isofit$mean_fit$data$lat) + terra::xmax(raster) < max(isofit$mean_fit$data$long) | + terra::ymin(raster) > min(isofit$mean_fit$data$lat) | + terra::ymax(raster) < max(isofit$mean_fit$data$lat) ) { warning("the cropping may not make sense (sources located outside structural raster)") } @@ -167,61 +167,75 @@ prepraster <- function(raster, if (verbose) { print(paste("cropping...")) } - + ## crop is performed: raster <- .crop_withmargin(raster, - xmin = min(isofit$mean_fit$data$long), - xmax = max(isofit$mean_fit$data$long), - ymin = min(isofit$mean_fit$data$lat), - ymax = max(isofit$mean_fit$data$lat), - margin_pct = margin_pct) + xmin = min(isofit$mean_fit$data$long), + xmax = max(isofit$mean_fit$data$long), + ymin = min(isofit$mean_fit$data$lat), + ymax = max(isofit$mean_fit$data$lat), + margin_pct = margin_pct + ) } else { if (length(manual_crop) == 4) { - if ((manual_crop[1] > manual_crop[2]) && (manual_crop[3] < manual_crop[4])) { - crop1 <- terra::crop(raster, - terra::ext(terra::xmin(terra::ext(raster)), - manual_crop[2], - manual_crop[3], - manual_crop[4])) - crop2 <- terra::crop(raster, - terra::ext(manual_crop[1], - terra::xmax(terra::ext(raster)), - manual_crop[3], - manual_crop[4])) - raster <- terra::shift(terra::merge(crop1, terra::shift(crop2, - dx = -360)), - dx = 360) + crop1 <- terra::crop( + raster, + terra::ext( + terra::xmin(terra::ext(raster)), + manual_crop[2], + manual_crop[3], + manual_crop[4] + ) + ) + crop2 <- terra::crop( + raster, + terra::ext( + manual_crop[1], + terra::xmax(terra::ext(raster)), + manual_crop[3], + manual_crop[4] + 
) + ) + raster <- terra::shift( + terra::merge(crop1, terra::shift(crop2, + dx = -360 + )), + dx = 360 + ) warning("The first longitude is greater than the second one. You may want this to study something around the pacific. This feature is not fully supported... but... the function prepraster() tried to cope with this. That implies a change in the coordinate system (0:360 instead of -180:180). This should create no problem for ploting isoscapes but this can create troubles to add polygons or points on the maps. If that is the case, you need to add 360 degree to the longitudes... If all that sounds complicated, just stick to a first longitude SMALLER than the second one.") } else { raster <- terra::crop(raster, manual_crop) } } } - if (aggregation_factor > 1) { ## test if aggregation is needed + if (aggregation_factor > 1) { ## test if aggregation is needed if (interactive()) { print(paste("aggregating...")) } - raster <- terra::aggregate(raster, fact = aggregation_factor, fun = aggregation_fn) ## aggregation + raster <- terra::aggregate(raster, + fact = aggregation_factor, fun = aggregation_fn, + na.rm = TRUE, ## aggregation introduces NAs if the ratio before/after aggreg is not whole in both directions + ## (`raster::aggregate()` implied `na.rm = TRUE` by default, `terra::aggregate()` does not) + cores = .data_IsoriX$IsoriX_options$Ncpu + ) } }) - + ## applies values_to_zero transformation terra::values(raster) <- ifelse(terra::values(raster) < max(values_to_zero) & terra::values(raster) > min(values_to_zero), 0, terra::values(raster)) - - ## store the raster in memory if possible - - raster_HD <- raster - raster <- terra::rast(raster_HD) - terra::values(raster) <- terra::values(raster_HD) - - + + ## store the raster in memory if possible ## I think the following trick made for **raster** is useless in **tera** but I am not sure + # raster_HD <- raster + # raster <- terra::rast(raster_HD) + # terra::values(raster) <- terra::values(raster_HD) + + if (verbose) { 
print(paste("done!")) print(time) } - + print(raster) return(raster) } - diff --git a/IsoriX/R/prepsources.R b/IsoriX/R/prepsources.R index f116740..f03489c 100644 --- a/IsoriX/R/prepsources.R +++ b/IsoriX/R/prepsources.R @@ -78,52 +78,64 @@ #' @examples #' ## Create a processed dataset for Germany #' GNIPDataDEagg <- prepsources(data = GNIPDataDE) -#' +#' #' head(GNIPDataDEagg) -#' +#' #' ## Create a processed dataset for Germany per month -#' GNIPDataDEmonthly <-prepsources(data = GNIPDataDE, -#' split_by = "month") -#' +#' GNIPDataDEmonthly <- prepsources( +#' data = GNIPDataDE, +#' split_by = "month" +#' ) +#' #' head(GNIPDataDEmonthly) -#' +#' #' ## Create a processed dataset for Germany per year -#' GNIPDataDEyearly <- prepsources(data = GNIPDataDE, -#' split_by = "year") -#' +#' GNIPDataDEyearly <- prepsources( +#' data = GNIPDataDE, +#' split_by = "year" +#' ) +#' #' head(GNIPDataDEyearly) -#' +#' #' ## Create isoscape-dataset for warm months in germany between 1995 and 1996 -#' GNIPDataDEwarm <- prepsources(data = GNIPDataDE, -#' month = 5:8, -#' year = 1995:1996) -#' +#' GNIPDataDEwarm <- prepsources( +#' data = GNIPDataDE, +#' month = 5:8, +#' year = 1995:1996 +#' ) +#' #' head(GNIPDataDEwarm) -#' -#' +#' +#' #' ## Create a dataset with 90% of obs -#' GNIPDataDE90pct <- prepsources(data = GNIPDataDE, -#' prop_random = 0.9, -#' random_level = "obs") -#' +#' GNIPDataDE90pct <- prepsources( +#' data = GNIPDataDE, +#' prop_random = 0.9, +#' random_level = "obs" +#' ) +#' #' lapply(GNIPDataDE90pct, head) # show beginning of both datasets -#' +#' #' ## Create a dataset with half the weather sources -#' GNIPDataDE50pctsources <- prepsources(data = GNIPDataDE, -#' prop_random = 0.5, -#' random_level = "source") -#' +#' GNIPDataDE50pctsources <- prepsources( +#' data = GNIPDataDE, +#' prop_random = 0.5, +#' random_level = "source" +#' ) +#' #' lapply(GNIPDataDE50pctsources, head) #' #' #' ## Create a dataset with half the weather sources split per month -#' 
GNIPDataDE50pctsourcesMonthly <- prepsources(data = GNIPDataDE, -#' split_by = "month", -#' prop_random = 0.5, -#' random_level = "source") -#' +#' GNIPDataDE50pctsourcesMonthly <- prepsources( +#' data = GNIPDataDE, +#' split_by = "month", +#' prop_random = 0.5, +#' random_level = "source" +#' ) +#' #' lapply(GNIPDataDE50pctsourcesMonthly, head) -#' +#' #' @export prepsources <- function(data, month = 1:12, @@ -141,46 +153,46 @@ prepsources <- function(data, col_long = "long", col_elev = "elev", col_month = "month", - col_year = "year" -) { - + col_year = "year") { ## Some checks - if (any(month %% 1 != 0) | any(month < 1) | any(month > 12)) { + if (any(month %% 1 != 0) || any(month < 1) || any(month > 12)) { stop("Months must be provided as a vector of integers and should be between 1 and 12.") } - + if (prop_random > 1) { stop("The value you entered for prop_random is > 1. It must be a proportion (so between 0 and 1)!") } - + ## Handle missing data if (missing("year")) year <- sort(unique(data[, col_year, drop = TRUE], na.rm = TRUE)) ## Handle the month column and convert all months to numbers data[, col_month] <- .converts_months_to_numbers(data[, col_month, drop = TRUE]) - + ## Prepare selection - month_select <- data[, col_month, drop = TRUE] %in% month + month_select <- data[, col_month, drop = TRUE] %in% month year_select <- data[, col_year, drop = TRUE] %in% year long_select <- data[, col_long, drop = TRUE] >= long_min & data[, col_long, drop = TRUE] <= long_max - lat_select <- data[, col_lat, drop = TRUE] >= lat_min & data[, col_lat, drop = TRUE] <= lat_max - all_select <- month_select & year_select & long_select & lat_select - + lat_select <- data[, col_lat, drop = TRUE] >= lat_min & data[, col_lat, drop = TRUE] <= lat_max + all_select <- month_select & year_select & long_select & lat_select + ## Apply selection - query_data <- data[all_select, ,drop = TRUE] - + query_data <- data[all_select, , drop = TRUE] + ## Defining function returning unique values 
with test unique2 <- function(x, key) { u_x <- unique(x) - if (length(u_x) == 1) return(u_x) + if (length(u_x) == 1) { + return(u_x) + } warning(paste(c("Some", key, "values are not unique but should be, so the first element was taken among:", u_x, "."), collapse = " ")) return(u_x[1]) } - + ## Defining function for aggregation aggregate_data <- function(d, split_by = split_by) { d <- droplevels(d) - + ## Create the variable used for the split if (is.null(split_by)) { split <- d[, col_source_ID, drop = TRUE] @@ -191,25 +203,26 @@ prepsources <- function(data, } else { stop("The argument used for 'split_by' is unknown.") } - + ## Perform the aggregation - df <- data.frame(split = factor(c(tapply(as.character(split), split, unique2, key = "split"))), - source_ID = factor(c(tapply(as.character(d[, col_source_ID, drop = TRUE]), split, unique2, key = "source_ID"))), - mean_source_value = c(tapply(d[, col_source_value, drop = TRUE], split, mean, na.rm = TRUE)), - var_source_value = c(tapply(d[, col_source_value, drop = TRUE], split, stats::var, na.rm = TRUE)), - n_source_value = c(tapply(d[, col_source_ID, drop = TRUE], split, length)), - lat = c(tapply(d[, col_lat, drop = TRUE], split, unique2, key = "latitude")), - long = c(tapply(d[, col_long, drop = TRUE], split, unique2, key = "longitude")), - elev = c(tapply(d[, col_elev, drop = TRUE], split, unique2, key = "elevation")) + df <- data.frame( + split = factor(c(tapply(as.character(split), split, unique2, key = "split"))), + source_ID = factor(c(tapply(as.character(d[, col_source_ID, drop = TRUE]), split, unique2, key = "source_ID"))), + mean_source_value = c(tapply(d[, col_source_value, drop = TRUE], split, mean, na.rm = TRUE)), + var_source_value = c(tapply(d[, col_source_value, drop = TRUE], split, stats::var, na.rm = TRUE)), + n_source_value = c(tapply(d[, col_source_ID, drop = TRUE], split, length)), + lat = c(tapply(d[, col_lat, drop = TRUE], split, unique2, key = "latitude")), + long = c(tapply(d[, col_long, 
drop = TRUE], split, unique2, key = "longitude")), + elev = c(tapply(d[, col_elev, drop = TRUE], split, unique2, key = "elevation")) ) ## Note that above the c() prevent the creation of 1d arrays that are troublesome in spaMM - + null.var <- !is.na(df$var_source_value) & df$var_source_value == 0 if (sum(null.var) > 0) { df$var_source_value[null.var] <- 0.01 warnings(paste(length(null.var), "Null variances were obtained during aggregation. They were changed to 0.01 assuming that the actual variance cannot be smaller than the measurement error variance.")) } - + ## Retrieve the relevant splitting information if (is.null(split_by)) { df <- df[order(df$source_ID), ] @@ -220,46 +233,44 @@ prepsources <- function(data, df$year <- as.numeric(unlist(lapply(strsplit(as.character(df$split), split = "_", fixed = TRUE), function(i) i[2]))) df <- df[order(df$source_ID, df$year), ] } - + ## Clean-up and output df$split <- NULL df <- droplevels(df[!is.na(df$mean_source_value), ]) rownames(df) <- NULL return(df) } - + ## Return aggregated data if no random selection is needed if (prop_random == 0) { return(aggregate_data(query_data, split_by = split_by)) } - + ## Random draw of observations if (random_level == "obs") { howmanylines <- round(prop_random * nrow(query_data)) whichlines <- sample(x = nrow(query_data), size = howmanylines, replace = FALSE) - selected_data <- query_data[whichlines, ] + selected_data <- query_data[whichlines, ] remaining_data <- query_data[-whichlines, ] - return(list(selected_data = aggregate_data(selected_data, split_by = split_by), - remaining_data = aggregate_data(remaining_data, split_by = split_by) - ) - ) - } - + return(list( + selected_data = aggregate_data(selected_data, split_by = split_by), + remaining_data = aggregate_data(remaining_data, split_by = split_by) + )) + } + ## Random draw of source_ID if (random_level == "source") { howmanysources <- round(prop_random * length(unique(query_data[, col_source_ID]))) whichssources <- sample(x = 
unique(query_data[, col_source_ID]), size = howmanysources, replace = FALSE) dolines <- query_data[, col_source_ID] %in% whichssources - selected_data <- query_data[dolines, ] + selected_data <- query_data[dolines, ] remaining_data <- query_data[!dolines, ] - return(list(selected_data = aggregate_data(selected_data, split_by = split_by), - remaining_data = aggregate_data(remaining_data, split_by = split_by) - ) - ) + return(list( + selected_data = aggregate_data(selected_data, split_by = split_by), + remaining_data = aggregate_data(remaining_data, split_by = split_by) + )) } - + ## Display error if no return encountered before stop("The argument you chose for random_level is unknown.") - } - \ No newline at end of file diff --git a/IsoriX/R/reexport.R b/IsoriX/R/reexport.R index 0bfa880..3472143 100644 --- a/IsoriX/R/reexport.R +++ b/IsoriX/R/reexport.R @@ -89,4 +89,15 @@ terra::values #' @importFrom terra cellSize #' @export -terra::cellSize \ No newline at end of file +terra::cellSize + + +# Imports not reexported -------------------------------------------------- + +#' @importFrom terra saveRDS +#' @export +NULL # to not shadow IsoriX doc + +#' @importFrom terra readRDS +#' @export +NULL # to not shadow IsoriX doc diff --git a/IsoriX/R/serialize.R b/IsoriX/R/serialize.R new file mode 100644 index 0000000..2633c3d --- /dev/null +++ b/IsoriX/R/serialize.R @@ -0,0 +1,225 @@ +#' Save and read objects produced by IsoriX using RDS files +#' +#' Because files created with IsoriX contain [`terra::SpatRaster`] and +#' [`terra::SpatVector`] objects, they cannot be saved using [`base::saveRDS`] +#' or [`base::save`] functions. The reason is that objects created with [terra] +#' point to data stored in memory which are not contained in the R objects +#' themselves. 
Adapting the approach implemented in the [terra] package, we +#' provide a wrapper for [`base::saveRDS`] and [`base::readRDS`] functions, +#' which allows one to save and read objects produced with IsoriX by simply +#' using `saveRDS()` and `readRDS()`. +#' +#' [`base::saveRDS`] and [`base::readRDS`] are standard S3 functions. So in +#' order to be able to have a specific behaviour for objects produced with +#' IsoriX, we imported `saveRDS` and `readRDS` S4 generics from [terra] to +#' dispatch both S3 and S4 IsoriX-specific methods (see [Methods_for_S3]). The +#' S3 implementation is consistent with the rest of the package and presents all +#' usual benefits associated with S3 methods (e.g. simple access to the code). +#' The S4 implementation makes IsoriX methods compatible with the use of +#' [`terra::saveRDS`] and [`terra::readRDS`]. +#' +#' @param object (definition copied from [`base::readRDS`]:) R object to serialize. +#' @param file (definition copied from [`base::readRDS`]:) a connection or the name of the file where the R object is saved to or read from. +#' @param ascii (definition copied from [`base::readRDS`]:) a logical. If `TRUE` or `NA`, an ASCII representation is written; otherwise (default), a binary one is used. See the comments in the help for [`base::save`]. +#' @param version (definition copied from [`base::readRDS`]:) the workspace format version to use. `NULL` specifies the current default version (3). The only other supported value is `2`, the default from R 1.4.0 to R 3.5.0. +#' @param compress (definition copied from [`base::readRDS`]:) a logical specifying whether saving to a named file is to use "gzip" compression, or one of "gzip", "bzip2" or "xz" to indicate the type of compression to be used. Ignored if file is a connection. +#' @param refhook (definition copied from [`base::readRDS`]:) a hook function for handling reference objects. 
+#' +#' @name serialize +#' @aliases serialise readRDS saveRDS +#' @keywords saving +#' +#' @return +#' For `saveRDS`, `NULL` invisibly. +#' +#' For `readRDS`, an R object. +#' +#' @examples +#' if (getOption_IsoriX("example_maxtime") > 30) { +#' ## We prepare the data +#' GNIPDataDEagg <- prepsources(data = GNIPDataDE) +#' +#' ## We fit the models +#' GermanFit <- isofit( +#' data = GNIPDataDEagg, +#' mean_model_fix = list(elev = TRUE, lat_abs = TRUE) +#' ) +#' +#' ## We build the isoscapes +#' GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) +#' +#' ## Saving as RDS +#' filename <- tempfile(fileext = ".rds") # or whatever names you want +#' saveRDS(GermanScape, file = filename) +#' +#' ## Reading RDS +#' GermanScape2 <- readRDS(filename) +#' GermanScape2 +#' +#' ## Saving data.frame object as RDS +#' filename2 <- tempfile(fileext = ".rds") # or whatever names you want +#' saveRDS(iris, file = filename2) +#' +#' ## Reading RDS containing data.frame +#' iris2 <- readRDS(filename2) +#' iris2 +#' +#' ## Saving terra object as RDS +#' filename3 <- tempfile(fileext = ".rds") # or whatever names you want +#' f <- system.file("ex/elev.tif", package = "terra") +#' r <- rast(f) +#' saveRDS(r, file = filename3) +#' +#' ## Reading RDS containing terra object +#' r2 <- readRDS(filename3) +#' r2 +#' } +#' +NULL + + +# Defining omnibus function -------------------------------------------------- + +#' @describeIn serialize S3 function to save IsoriX objects into a RDS file +#' +#' @export +#' +saveRDS_IsoriX <- function(object, file = "", ascii = FALSE, version = NULL, compress = TRUE, refhook = NULL) { + # message("Saving RDS using IsoriX method") + if (!is.null(object$isoscapes)) { + if (inherits(object$isoscapes, "SpatRaster")) { + object$isoscapes <- terra::wrap(object$isoscapes) + } else { + stop("Saving situation not implemented yet. 
Please contact the package maintainer.") + } + } + if (!is.null(object$group)) { + object$group <- lapply(object$group, \(x) { + if (inherits(x, "SpatRaster")) { + terra::wrap(x) + } else { + stop("Saving situation not implemented yet. Please contact the package maintainer.") + } + }) + } + if (!is.null(object$sample)) { + object$sample <- lapply(object$sample, \(x) { + if (inherits(x, "SpatRaster")) { + terra::wrap(x) + } else { + stop("Saving situation not implemented yet. Please contact the package maintainer.") + } + }) + } + if (!is.null(object$sp_points)) { + object$sp_points <- lapply(object$sp_points, \(x) { + if (inherits(x, "SpatVector")) { + terra::wrap(x) + } else { + stop("Saving situation not implemented yet. Please contact the package maintainer.") + } + }) + } + base::saveRDS(object, file = file, ascii = ascii, version = version, compress = compress, refhook = refhook) +} + + +# Defining S3 methods ----------------------------------------------------- + +#' @describeIn serialize S3 method to save an `ISOSCAPE` object into a RDS file +#' +#' @method saveRDS ISOSCAPE +#' @exportS3Method saveRDS ISOSCAPE +#' @export +#' +saveRDS.ISOSCAPE <- function(object, file = "", ascii = FALSE, version = NULL, compress = TRUE, refhook = NULL) { + saveRDS_IsoriX(object, file = file, ascii = ascii, version = version, compress = compress, refhook = refhook) +} + +#' @describeIn serialize S3 method to save a `CALIBFIT` object into a RDS file +#' +#' @method saveRDS CALIBFIT +#' @exportS3Method saveRDS CALIBFIT +#' @export +#' +saveRDS.CALIBFIT <- function(object, file = "", ascii = FALSE, version = NULL, compress = TRUE, refhook = NULL) { + saveRDS_IsoriX(object, file = file, ascii = ascii, version = version, compress = compress, refhook = refhook) +} + +#' @describeIn serialize S3 method to save an `ISOFIND` object into a RDS file +#' +#' @method saveRDS ISOFIND +#' @exportS3Method saveRDS ISOFIND +#' +saveRDS.ISOFIND <- function(object, file = "", ascii = FALSE, 
version = NULL, compress = TRUE, refhook = NULL) { + saveRDS_IsoriX(object, file = file, ascii = ascii, version = version, compress = compress, refhook = refhook) +} + + +#' @describeIn serialize S3 method to read an object produced with IsoriX (or other) stored in a RDS file +#' +#' @method readRDS character +#' @exportS3Method readRDS character +#' @export +#' +readRDS.character <- function(file, refhook = NULL) { + # message("Reading RDS using IsoriX wrapper") + object <- base::readRDS(file = file, refhook = refhook) + if (inherits(object, "PackedSpatRaster") || inherits(object, "PackedSpatVector")) { + return(terra::unwrap(object)) + } + if (!inherits(object, "ISOSCAPE") && !inherits(object, "CALIBFIT") && !inherits(object, "ISOFIND")) { + return(object) + } + if (!is.null(object$isoscapes)) { + if (inherits(object$isoscapes, "PackedSpatRaster")) { + object$isoscapes <- terra::unwrap(object$isoscapes) + } else { + stop("Saving situation not implemented yet. Please contact the package maintainer.") + } + } + if (!is.null(object$sp_points)) { + object$sp_points <- lapply(object$sp_points, \(x) { + if (inherits(x, "PackedSpatVector")) { + terra::unwrap(x) + } else { + stop("Saving situation not implemented yet. 
Please contact the package maintainer.") + } + }) + } + object +} + + +# Defining S4 methods ----------------------------------------------------- + +#' @describeIn serialize S4 method to save an `ISOSCAPE` object into a RDS file +#' +#' @method saveRDS ISOSCAPE +#' @export +#' +setMethod("saveRDS", signature(object = "ISOSCAPE"), saveRDS.ISOSCAPE) + + +#' @describeIn serialize S4 method to save an `CALIBFIT` object into a RDS file +#' +#' @method saveRDS CALIBFIT +#' @export +#' +setMethod("saveRDS", signature(object = "CALIBFIT"), saveRDS.CALIBFIT) + + +#' @describeIn serialize S4 method to save an `ISOFIND` object into a RDS file +#' +#' @method saveRDS ISOFIND +#' @export +#' +setMethod("saveRDS", signature(object = "ISOFIND"), saveRDS.ISOFIND) + + +#' @describeIn serialize S4 method to read an object produced with IsoriX (or other) stored in a RDS file +#' +#' @method readRDS character +#' @export +#' +setMethod("readRDS", signature(file = "character"), readRDS.character) diff --git a/IsoriX/R/testpackage.R b/IsoriX/R/testpackage.R index 63b25fd..1eb52e4 100644 --- a/IsoriX/R/testpackage.R +++ b/IsoriX/R/testpackage.R @@ -1,68 +1,75 @@ ## Small script to test that all functions in the package work properly run <- FALSE ## To avoid cyclic issues in R CMD check -#run <- TRUE +# run <- TRUE if (run) { library(IsoriX) options_IsoriX(example_maxtime = Inf) - + example(options_IsoriX) - + example(create_aliens) - + example(AssignDataBat) example(AssignDataBat2) example(CalibDataBat) example(CalibDataBat2) CalibDataBat2$elev <- NULL foo <- paste(tempfile(), ".tif") - getelev(file = foo, z = 1, - lat_min = min(CalibDataBat2$lat), - lat_max = max(CalibDataBat2$lat), - long_min = min(CalibDataBat2$long), - long_max = max(CalibDataBat2$long)) + getelev( + file = foo, z = 1, + lat_min = min(CalibDataBat2$lat), + lat_max = max(CalibDataBat2$lat), + long_min = min(CalibDataBat2$long), + long_max = max(CalibDataBat2$long) + ) ElevationRasterBig <- rast(foo) 
CalibDataBat2$elev <- extract( ElevationRasterBig, - cbind(CalibDataBat2$long, CalibDataBat2$lat)) + cbind(CalibDataBat2$long, CalibDataBat2$lat) + ) head(CalibDataBat2) example(AssignDataAlien) example(CalibDataAlien) - + foo <- paste(tempfile(), ".tif") getelev(file = foo, z = 1, overwrite = TRUE) elev_raster <- terra::rast(foo) plot(elev_raster) rm(elev_raster) - + foo <- tempdir() options(timeout = 500) outpath <- getprecip(path = foo, overwrite = TRUE) - + GNIPDataDEagg <- prepsources(data = GNIPDataDE) - GermanFit <- isofit(data = GNIPDataDEagg, - mean_model_fix = list(elev = TRUE, lat.abs = TRUE)) - StrRaster <- prepraster(raster = ElevRasterDE, - isofit = GermanFit, - aggregation_factor = 0) + GermanFit <- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = TRUE, lat.abs = TRUE) + ) + StrRaster <- prepraster( + raster = ElevRasterDE, + isofit = GermanFit, + aggregation_factor = 0 + ) PrecipitationBrick <- prepcipitate(path = outpath, raster = StrRaster) # path = foo also works! levelplot(PrecipitationBrick) - + # example(isosim) ## not activated for now - + example(prepraster) - + example(prepsources) - + example(isofit) example(isomultifit) - + example(isoscape) - + example(isomultiscape) - + example(calibfit) - + example(isofind) } diff --git a/IsoriX/R/zzz.R b/IsoriX/R/zzz.R index 40dc29c..bc1fc48 100644 --- a/IsoriX/R/zzz.R +++ b/IsoriX/R/zzz.R @@ -4,24 +4,23 @@ .onAttach <- function(libname, pkgname) { ## This function should not be called by the user. ## It displays a message when the package is being loaded. 
- packageStartupMessage(## display message - "\n IsoriX version ", utils::packageDescription("IsoriX")$Version," is now loaded", - "\n", - "\n Type:", - "\n * `?IsoriX` for a very short description", - "\n * `browseURL('https://bookdown.org/content/782/')` for a longer (online) documentation", - "\n * `help(package = 'IsoriX', help_type = 'html')` for a list of the package objects and help files", - "\n * `citation('IsoriX')` for how to cite IsoriX (i.e. the papers you should read)", - "\n * `news(package = 'IsoriX')` for info on changed between versions of IsoriX", - "\n", - "\n Please join the mailing list 'https://groups.google.com/g/IsoriX'", - "\n for help, news and discussions about IsoriX", - "\n" - ) - + packageStartupMessage( ## display message + "\n IsoriX version ", utils::packageDescription("IsoriX")$Version, " is now loaded", + "\n", + "\n Type:", + "\n * `?IsoriX` for a very short description", + "\n * `browseURL('https://bookdown.org/content/782/')` for a longer (online) documentation", + "\n * `help(package = 'IsoriX', help_type = 'html')` for a list of the package objects and help files", + "\n * `citation('IsoriX')` for how to cite IsoriX (i.e. the papers you should read)", + "\n * `news(package = 'IsoriX')` for info on changed between versions of IsoriX", + "\n", + "\n Please join the mailing list 'https://groups.google.com/g/IsoriX'", + "\n for help, news and discussions about IsoriX", + "\n" + ) + .load_internal_files() ## lazy loading of the internal data - - } +} .onLoad <- function(libname, pkgname) { @@ -33,7 +32,7 @@ .onUnload <- function(libpath) { ## This function should not be called by the user. ## It restores the original R options. - options(.data_IsoriX$R_options) ## reset R options to their backed up values + options(.data_IsoriX$R_options) ## reset R options to their backed up values } @@ -87,7 +86,7 @@ .hit_return <- function() { ## This function should not be called by the user. 
## It asks the user to press return in RStudio (used for plotting). - if (interactive() & .Platform$GUI == "RStudio" & getOption_IsoriX("dont_ask") == FALSE) { + if (interactive() && .Platform$GUI == "RStudio" && getOption_IsoriX("dont_ask") == FALSE) { cat("Hit for next plot") readline() } @@ -105,12 +104,11 @@ if (is.call(x = arg <- args[[arg_name]])) { if (arg[1] == "list()") { arg_input <- mget(names(args), envir = env)[[arg_name]] - arg_full <- eval(formals(fn)[[arg_name]]) + arg_full <- eval(formals(fn)[[arg_name]]) if (is.null(names(arg_input))) { if (length(arg_input) == length(arg_full)) { names(arg_input) <- names(arg_full) - } - else { + } else { stop(paste("The list", arg_name, "should contain names, or be of length equal to the default.")) } } @@ -129,11 +127,11 @@ ## If the months are already as numbers, it works too. ## Example: .converts_months_to_numbers(c("January", "Feb", 3, "April", "Toto")) end <- sapply(x, function(x) { - res <- match(tolower(x), tolower(month.abb)) ## deals with abbreviation + res <- match(tolower(x), tolower(month.abb)) ## deals with abbreviation if (is.na(res)) { - res <- match(tolower(x), tolower(month.name)) ## deals with full names + res <- match(tolower(x), tolower(month.name)) ## deals with full names } - if (is.na(res)) { ## deal with other cases + if (is.na(res)) { ## deal with other cases res <- x } if (res %in% paste(1:12)) { ## check if other cases are numbers (quoted or not) @@ -143,7 +141,7 @@ return(res) } else { warning("some months are NA after the conversion in integers, please check/fix your data!") - return(NA) ## if final output is not a number, it returns NA + return(NA) ## if final output is not a number, it returns NA } }) return(end) @@ -157,81 +155,86 @@ } else if (terra::inMemory(var)) { return(as.numeric(terra::values(var))) } - + if (interactive()) { print("extracting values from stored rasters...") } - + if (inherits(var, c("SpatRaster"))) { if (terra::nlyr(var) == 1) { - var <- 
terra::quantile(var, seq(0, 1, length = nb_quantiles)) - return(var) + var <- terra::quantile(var, seq(0, 1, length = nb_quantiles)) + return(var) } else if (terra::nlyr(var) > 1) { - max_var <- max(terra::values(max(var))) - min_var <- min(terra::values(min(var))) - var <- unique(c(min_var, - apply(terra::quantile(var, seq(0, 1, length = nb_quantiles)), 2, stats::median), - max_var)) - return(var) + max_var <- max(terra::values(max(var))) + min_var <- min(terra::values(min(var))) + var <- unique(c( + min_var, + apply(terra::quantile(var, seq(0, 1, length = nb_quantiles)), 2, stats::median), + max_var + )) + return(var) } } - + stop("'var' has an unknown class") } .crop_withmargin <- function(raster, xmin, xmax, ymin, ymax, margin_pct = 5) { ## This function should not be called by the user. - ## It crops a raster using a safety margin - margin_long <- (xmax - xmin) * margin_pct/100 - margin_lat <- (ymax - ymin) * margin_pct/100 - - terra::crop(raster, terra::ext(xmin - margin_long, - xmax + margin_long, - ymin - margin_lat, - ymax + margin_lat)) + ## It crops a raster using a safety margin. + margin_long <- (xmax - xmin) * margin_pct / 100 + margin_lat <- (ymax - ymin) * margin_pct / 100 + terra::crop(raster, terra::ext( + xmin - margin_long, + xmax + margin_long, + ymin - margin_lat, + ymax + margin_lat + )) } .invert_reg <- function(intercept, slope, SE_I, SE_S, phi, N, sign_mean_Y) { ## This function should not be called by the user. - ## It turns a regression x ~ y to a regression y ~ x + ## It turns a regression x ~ y to a regression y ~ x. 
Nminus1 <- N - 1L Nminus2 <- N - 2L - Nfac <- Nminus1/N - + Nfac <- Nminus1 / N + MSExony <- phi VarSxony <- SE_S^2 - Vary <- MSExony/(Nminus1*VarSxony) - Covxy <- Vary*slope - Varx <- (MSExony*(slope^2 + Nminus2*VarSxony))/(Nminus1*VarSxony) - o_slope <- Covxy/Varx - - resid_MSE <- (Vary - Covxy^2/Varx)*Nminus1/Nminus2 - o_SE_S <- sqrt(resid_MSE/(Nminus1*Varx)) - - Ey2 <- (SE_I/SE_S)^2 - Ey <- sign_mean_Y * sqrt(Ey2 - Vary*Nfac) - Ex <- intercept + slope*Ey - Ex2 <- Varx*Nfac + Ex^2 - o_SE_I <- sqrt(resid_MSE*Ex2/(Nminus1*Varx)) - vcov12 <- -resid_MSE*Ex/(Nminus1*Varx) + Vary <- MSExony / (Nminus1 * VarSxony) + Covxy <- Vary * slope + Varx <- (MSExony * (slope^2 + Nminus2 * VarSxony)) / (Nminus1 * VarSxony) + o_slope <- Covxy / Varx + + resid_MSE <- (Vary - Covxy^2 / Varx) * Nminus1 / Nminus2 + o_SE_S <- sqrt(resid_MSE / (Nminus1 * Varx)) + + Ey2 <- (SE_I / SE_S)^2 + Ey <- sign_mean_Y * sqrt(Ey2 - Vary * Nfac) + Ex <- intercept + slope * Ey + Ex2 <- Varx * Nfac + Ex^2 + o_SE_I <- sqrt(resid_MSE * Ex2 / (Nminus1 * Varx)) + vcov12 <- -resid_MSE * Ex / (Nminus1 * Varx) o_vcov <- matrix(c(o_SE_I^2, vcov12, vcov12, o_SE_S^2), ncol = 2) - - list(intercept = Ey - o_slope*Ex, - slope = o_slope, - SE_I = o_SE_I, - SE_S = o_SE_S, - phi = resid_MSE, - vcov = o_vcov) + + list( + intercept = Ey - o_slope * Ex, + slope = o_slope, + SE_I = o_SE_I, + SE_S = o_SE_S, + phi = resid_MSE, + vcov = o_vcov + ) } # Example: # set.seed(123) # xy <- data.frame(x = x <- rnorm(20), y = rnorm(20, mean = 10) + 0.7*x) # input <- lm(x ~ y, data = xy) # output <- lm(y ~ x, data = xy) -# +# # foo <- .invert_reg(intercept = coef(input)[1], # slope = coef(input)[2], # SE_I = sqrt(vcov(input)[1, 1]), @@ -239,39 +242,134 @@ # phi = summary(input)$sigma^2, # sign_mean_Y = sign(mean(xy$y)), # N = 20) -# +# # d_output <- data.frame(intercept = coef(output)[1], # slope = coef(output)[2], # SE_I = sqrt(vcov(output)[1, 1]), # SE_S = sqrt(vcov(output)[2, 2]), # phi = summary(output)$sigma^2) -# +# # d_foo <- 
data.frame(intercept = foo$intercept, # slope = foo$slope, # SE_I = foo$SE_I, # SE_S = foo$SE_S, # phi = foo$phi) -# +# # rbind(d_output, d_foo) .load_internal_files <- function() { ## This function should not be called by the user. - ## It performs the lazy loading of the data since terra cannot handle rda files + ## It performs the lazy loading of the data since terra cannot handle rda files. assign("ElevRasterDE", terra::rast(system.file("extdata/ElevRasterDE.tif", package = "IsoriX")), envir = as.environment("package:IsoriX")) assign("CountryBorders", terra::readRDS(system.file("extdata/CountryBorders.rds", package = "IsoriX")), envir = as.environment("package:IsoriX")) assign("OceanMask", terra::readRDS(system.file("extdata/OceanMask.rds", package = "IsoriX")), envir = as.environment("package:IsoriX")) + assign("PrecipBrickDE", terra::readRDS(system.file("extdata/PrecipBrickDE.rds", package = "IsoriX")), envir = as.environment("package:IsoriX")) } -.suppress_warning <- function(x, warn = "") { +## from purrr::quietly() +.quiet <- function(job) { ## This function should not be called by the user. - ## It hides expected warnings in some functions - withCallingHandlers(x, warning = function(w) { - if (!grepl(warn, x = w[[1]])) { - warning(w[[1]], call. = FALSE) - } + ## It capture warnings and messages in some functions. 
+ warnings <- character() + wHandler <- function(w) { + warnings <<- c(warnings, conditionMessage(w)) invokeRestart("muffleWarning") + } + messages <- character() + mHandler <- function(m) { + messages <<- c(messages, conditionMessage(m)) + invokeRestart("muffleMessage") + } + temp <- file() + sink(temp) + on.exit({ + sink() + close(temp) }) + result <- withCallingHandlers(job, warning = wHandler, message = mHandler) + output <- paste0(readLines(temp, warn = FALSE), collapse = "\n") + list( + result = result, + output = output, + warnings = warnings, + messages = messages + ) +} + +.safe_and_quiet_predictions <- function(object, newdata, variances = list(), ...) { + ## This function should not be called by the user. + ## It run the predictions while capturing errors, warnings and messages without stopping + + res <- .quiet(tryCatch(spaMM::predict.HLfit(object = object, newdata = newdata, variances = variances, ...), + error = function(e) { + ## debugging mode + if (getOption_IsoriX("spaMM_debug")) { + stop(e) + } + + ## convert error into warning to avoid stopping + warning(paste(" WARNING converted from ERROR in `spaMM::predict.HLfit()`:\n", e)) + + ## return NAs for all outputs of `spaMM::predict.HLfit` if there is an error + m <- matrix(NA, nrow = nrow(newdata), ncol = 1) + if (isTRUE(as.vector(variances)[["predVar"]]) || isTRUE(as.vector(variances)[["respVar"]])) { + if (isTRUE(as.vector(variances)[["cov"]])) { + param_fixed <- names(spaMM::fixef(object)) + m <- matrix(NA, nrow = nrow(length(param_fixed)), ncol = nrow(length(param_fixed))) + rownames(m) <- colnames(m) <- names(param_fixed) + attr(m, "predVar") <- m + } + attr(m, "predVar") <- rep(NA, nrow(newdata)) + } + if (isTRUE(as.vector(variances)[["residVar"]])) { + if (isTRUE(as.vector(variances)[["cov"]])) { + param_fixed <- names(spaMM::fixef(object)) + m <- matrix(NA, nrow = nrow(length(param_fixed)), ncol = nrow(length(param_fixed))) + rownames(m) <- colnames(m) <- names(param_fixed) + attr(m, 
"residVar") <- m + } + attr(m, "residVar") <- rep(NA, nrow(newdata)) + } + if (isTRUE(as.vector(variances)[["respVar"]])) { + if (isTRUE(as.vector(variances)[["cov"]])) { + param_fixed <- names(spaMM::fixef(object)) + m <- matrix(NA, nrow = nrow(length(param_fixed)), ncol = nrow(length(param_fixed))) + rownames(m) <- colnames(m) <- names(param_fixed) + attr(m, "respVar") <- m + } + attr(m, "respVar") <- rep(NA, nrow(newdata)) + } + return(m) + } + )) + + ## debugging mode + if (getOption_IsoriX("spaMM_debug")) { + if (length(res$warnings) > 0) warnings(res$warnings) + if (length(res$messages) > 0) message(res$messages) + } + + ## return + res } +release_bullets <- function() { + ## This function should not be called by the user. + ## It is used to add bullet points to GitHub issues created with `usethis::use_release_issue()` + c( + "run `styler:::style_active_pkg()` for reformatting of the code according to `styler::tidyverse_style()`", + "run `lintr::lint_package()` for checking code in depth", + "run `all_checks <- rhub::check_for_cran()`", + "run `usethis::use_cran_comments()` and paste the output of `all_checks$cran_summary()` in the created file", + "run `make build` & `make check` in terminal", + "check folder `IsoriX.Rcheck` and file in there", + "run `tests/local_tests.R` step-by-step", + "rebuilt bookdown (after `usethis::use_version('patch')`) to make sure all works in there too (open Rproj from the bookdown folder and follow instructions in README)", + "update bookdown (open Rproj from the bookdown folder and follow instructions in README)", + "write email to Google Group (https://groups.google.com/g/IsoriX)" + ) +} + + utils::globalVariables(c("CountryBorders", "OceanMask")) diff --git a/IsoriX/README.md b/IsoriX/README.md index e931443..b00fbff 100644 --- a/IsoriX/README.md +++ b/IsoriX/README.md @@ -4,14 +4,15 @@ This is the development folder for the R package IsoriX. 
See [main page](https://github.com/courtiol/IsoriX) for description and installation procedure. -## Some useful links for developers - - - [Simple introduction to the making of R packages](http://r-pkgs.had.co.nz/) +## Steps before new release - - [Writing R extensions](https://cran.r-project.org/doc/manuals/r-release/R-exts.html) + - run `usethis::use_release_issue(version = "XX")` with `"XX"` the future release number. + - follow the steps in the issue created by the previous command. - - [R coding standard](https://google.github.io/styleguide/Rguide.xml) +## Some useful links for developers - - [Using version control in RStudio](https://support.rstudio.com/hc/en-us/articles/200532077-Version-Control-with-Git-and-SVN) - + - [Wickham & Bryan's book *R packages*](https://r-pkgs.org/) + - [Writing R extensions](https://cran.r-project.org/doc/manuals/r-release/R-exts.html) + - [R coding standard](https://style.tidyverse.org/) + - [Using version control in RStudio](https://support.posit.co/hc/en-us/articles/200532077-Version-Control-with-Git-and-SVN) - [Preparing your package for a CRAN submission](https://github.com/ThinkR-open/prepare-for-cran) diff --git a/IsoriX/cran-comments.md b/IsoriX/cran-comments.md new file mode 100644 index 0000000..bf04142 --- /dev/null +++ b/IsoriX/cran-comments.md @@ -0,0 +1,5 @@ +## R CMD check results + +0 errors | 0 warnings | 1 note + +* checks flag URL "https://nucleus.iaea.org/wiser/index.aspx" as invalid but it is valid when directly accessed. 
diff --git a/IsoriX/data/PrecipBrickDE.rda b/IsoriX/data/PrecipBrickDE.rda deleted file mode 100644 index 4437c53..0000000 Binary files a/IsoriX/data/PrecipBrickDE.rda and /dev/null differ diff --git a/IsoriX/inst/NEWS.Rd b/IsoriX/inst/NEWS.Rd deleted file mode 100644 index 4824661..0000000 --- a/IsoriX/inst/NEWS.Rd +++ /dev/null @@ -1,298 +0,0 @@ -\name{NEWS} -\title{IsoriX News} - -\encoding{UTF-8} - -\section{version 1.0}{ - \subsection{Upcoming features planned for future releases}{ - \itemize{ - \item (version 1.0 does not exist yet) - \item feature requests can be defined and watched here: \url{https://github.com/courtiol/IsoriX/issues} - } - } -} - -\section{version 0.9.1}{ - \subsection{Major change}{ - \itemize{ - \item **IMPORTANT** Several spatial packages previously used by IsoriX are likely to retire sometimes in October 2023. - The maintainers of those packages have recommended developpers to instead rely on alternative packages which have been recently developped and which superseed the old packages. - As a consequence, we had to recode a lot of IsoriX for it to continue to work. - For the most part, these changes are internal and should not impact much users, but it is possible that old workflows used to create plots will have to be adapted for the code to keep working. - Moreover, IsoriX is not the only package that had to be overhault, other packages used by IsoriX are also being adapted, which means that the programming landscape is dynamic and bugs caused by incompatibility between packages are likely to surface. We will do our best to react quickly, but please let us know as soon as something goes wrong by dropping issues on the GitHub repository for IsoriX (\url{https://github.com/courtiol/IsoriX/issues}). 
- All this change can be perceived as annoying, but it is also for the best: it will allow us to add new features more easily in IsoriX in the future and it also makes it easier for users to convert IsoriX outputs so as to manipulate them using packages such as \pkg{sf} and \pkg{ggplot2}. - \item IsoriX no longer relies on the package \pkg{raster}. It instead now uses \pkg{terra} for handling rasters (#90 #161). - \item IsoriX no longer relies on the package \pkg{sp}. Plotting functionalities from sp have now been replaced by direct calls to \pkg{lattice}. For now, we had to implement methods and generics calling \pkg{lattice} in IsoriX, but those should ultimately be handled within \pkg{rasterVis} and \pkg{lattice}. See the file \code{interim_avoid_sp.R} for details. - } - } - \subsection{Minor change}{ - \itemize{ - \item \code{getprecip()} now normalises the input file and returns the path where the precipitation rasters are stored. - \item \code{prepcipitate()} can now handle as input for \code{path =} either the full path to the files returned by \code{getprecip()} -- which contains the folder inputed in \code{path} when calling \code{getprecip()} in addition to \code{"/wc2.1_30s_prec"} -- or the reduced path which only contains the folder inputed in \code{path} when calling \code{getprecip()}. - \item \code{getprecip()} now changes the timeout R options temporarily so as to avoid the download to fail because the default timeout setting is too short (#148). - \item the documentation for the datasets \code{GNIPDataALLagg} and \code{GNIPDataEUagg} was incorrect (#158). - \item one message about possible extrapolation during calibration was erroneous and is now removed (#159). - } - } - \subsection{Geeky change}{ - \itemize{ - \item \code{OceanMask} and \code{CountryBorders} are no longer stored as RDA files in \code{/data}, but as RDS files in \code{/extata} since objects created with \pkg{terra} cannot be saved as RDA files. 
These files are automatically loaded when the package is attached. - \item \pkg{elevatr} moved from Imports to Suggests (#157). - } - } -} - -\section{version 0.9}{ - \subsection{Solved BUGS}{ - \itemize{ - \item the previous released introduced an error in how the variance of the assignment test is computed in the absence of calibration (with important consequence in terms of assignments). This is now fixed (#151). - } - } - \subsection{Minor change}{ - \itemize{ - \item the base package 'colourspace' is now suggested to avoid a note in R CMD check. - } - } -} - -\section{version 0.8.3}{ - \subsection{New features}{ - \itemize{ - \item the function `calibfit()` gains an argument method that allows for selecting one of four calibration methods ("wild", "lab", "desk", "desk_inverse"). This allows for users to use 1) calibration samples associated with unknown environmental isotopic values, 2) calibration samples associated with known environmental isotopic values, or 3 & 4) the intercept and slope of a calibration relationship computed by others (e.g. values found in a paper). Note: the desk* methods allow for the consideration of a fractionaction factor too (i.e. slope = 0). See \code{calibfit} for details. (#20 & #142) - \item the function `getelev()` has been completely rewriten so as to rely on the package **elevatr** to download elevation data. You should check `?getelev` for learning how to use the new version of the function, but we retained the core principle of the previous function so that old workflow will only require minor adjustements. The new version still saves a *.tif file on the disk, albeit uing a different file name to avoid (data) confusion. (#140 & #107) - \item the function `isofind()` gains an argument `neglect_covPredCalib` that allows for the computation of a covariance term that was so far neglected in IsoriX. See `?isofind` for details. 
(#143) - \item the function `prepraster()` gains an argument `values_to_zero` to turn a range of elevation values to zeros (nullify negative elevation values by default). This is particular useful because the new version of `get_elev()` download an elevation raster that includes bathymetry. - \item new internal function `.invert_reg()` to invert regression (used for method "desk_inverse" in `calibfit()`) - } - } - \subsection{Minor change}{ - \itemize{ - \item when calling `plot()` on an object created with `calibfit()`, the plotting function now returns the fitted values and CI for users to be able to make alternative plots (#44) - \item new argument `xlim` for the plotting function for calibration fits - \item new argument `line` for customising how to plot the regression line in calibration fits - \item the summary method for calibration fits now displays the residual variance - \item `calibfit()` performs more check on extrapolation (#119) - \item when using `plot()` on an object of class ISOFIT, the x-axis for the plot showing the Matern correlation should have a range more adequate irrespective when autocorrelation is strong over short distances (#134) - \item documentation for `?plot()` now contains a description of what symbols mean in plots (#138) - \item when calling `plot()` on an object created with `isofind()`, the plotting function now detects sample of size 1 and no longer displays "Group" in the title of the assignment plot even if `who` = "group" (#120) - \item all functions accepting a `data.frame` as input should also now be compatible when provided with a `tibble` (#118) - \item typos have been corrected (#130) - \item default y-axis title changed to "Isotopic value in the environment" when plotting calibration fits to be flexible enough irrespective of the methods used in `calibfit()` - } - } - \subsection{Geeky change}{ - \itemize{ - \item the argument `long_min`, `long_max`, `lat_min` & `lat_max` function `prepsources()` now have explicit 
default values and should no longer be missing. - \item the version of spaMM required by IsoriX has changed to 3.13 so as to benefit from a new extractor we rely on for the computation of the 4th variance term during assignment (#143) - \item the function depending on the package RandomFields are no longer available since that package has been (for now) retired by CRAN :-( - \item IsoriX should now work with tibbles as inputs (#118) - } - } - \subsection{Solved BUGS}{ - \itemize{ - \item the printing method for the object of class ISOSCAPE was somehow not exported and thus not used (unreported issue) - \item plotting on a sphere ISOFIND objects did not work in some cases (#126) - } - } -} - -\section{version 0.8.2}{ - \subsection{New features}{ - \itemize{ - \item argument ylim for the plotting function for calibration fits - \item it is now possible to calibrate data containing missing isotopic values - \item it is now possible to assign data containing missing isotopic values - } - } - \subsection{Geeky change}{ - \itemize{ - \item the SpatialPolygons CountryBorders and OceanMask have been rebuilt for possibly improving the compatibility with new sp & rgdal - \item the website for WorlClim has now changed address, so links have been updated - \item rgdal is now listed as a suggested package - } - } - \subsection{Minor change}{ - \itemize{ - \item several weblinks had changed and have been updated - \item all old defunct functions have been removed from the package - } - } -} - -\section{version 0.8.1}{ - \subsection{Solved BUGS}{ - \itemize{ - \item fix issue #113: the plotting function was not working for isoscapes not stored in memory due to a wrong use of the quantile function. Many thanks to Dr. Gary Roemer and Amy Withers for reporting it! - } - } -} - -\section{version 0.8.1}{ - \subsection{New features}{ - \itemize{ - \item the datasets used in Courtiol et al. 2019 are now provided - \item many useful functions from raster, rasterVis, lattice... 
are now re-exported so they can be used without attaching those packages - \item new option in plots that allows to map the isoscape onto a sphere - \item a new dataset PrecipBrickDE containing monthly precipitation amounts for Germany - \item an argument y_title for the plotting function for isoscapes to allow one to simply change the title - \item arguments xlab and ylab for the plotting function for calibration fits - \item new method points for plotting more than one calibration fit - \item the plotting function for assignments can now show the location of the assignment samples - } - } - - \subsection{Major changes}{ - \itemize{ - \item the citations for the package have been updated! - \item many objects have been renamed to prepare the release of the version 1.0 - \item the vignettes have now been moved to a bookdown. To access the documentation you should now visit: \url{https://bookdown.org/content/782/} - } - } - - \subsection{Minor changes}{ - \itemize{ - \item all arguments 'bla.bla' have been renamed 'bla_bla' - \item the plotting function for calibfit gains an argument "..." 
for more control - \item a ploting method for rasterLayer has been included for conveniance - \item the function relevate is now called prepraster - \item the function prepdata is now called prepsources - \item in several functions the argument elevation.raster has been renamed as raster - \item in several functions the argument xxx.data has been renamed as data - } - } - - \subsection{Geeky changes}{ - \itemize{ - \item the file storing the internal functions is now called zzz.R - \item the dontrun and donttest have been replaced by comments due to new R CMD check flags - \item the function downloadfile is now exported - \item large temporary objects are now deleted within isofind to limit memory usage - \item the package is now being tested using testthat, but tests will be implemented in the future - \item a lot of the internal code as been rewriten to comply more closely to the IsoriX coding style - \item the list of suggested packages has been revised and rgdal removed as it caused (again) problems with Travis CI - \item following a change in spaMM predict.HLfit, the prediction are now being made by chunck of 1000 points instead of 150. This should lead to a tiny gain in performance - \item the function isoscape was performing predictions twice every 150 (or now 1000) locations, this was not influencing the isoscapes produced, but this has now been corrected - \item the function prepraster now produces an raster stored in memory if it is possible. This should prevent bugs that appears when using loaded rasters that were previously saved (the temporary link to the hard drive location is no longer correct in this case). - \item the function .objective_fn_calib has been moved within the function calibfit as it is not used elsewhere - \item the function calibfit as been prepared for a possible activation of a random effect for species ID in the future. But whether it would make sense or not remains to be determined. 
- \item the function .Fisher_method now directly computes the exponential of the log pv if only one value is provided. This leads to much faster assignment in the case of a single observation. - } - } - - \subsection{Solved BUGS}{ - \itemize{ - \item the plotting function for calibration fit was displaying CI based on variance instead of SD - \item the function getprecip and prepcipitate were not handling paths manualy defined properly - \item the plotting functions were crashing in case of no variation in the landscape - \item the plotting functions were crashing when called on multiple-raster objects not stored 'inMemory' - \item the plotting function for fitted model was not displaying one plot in RStudio when called on objects of class MULTIISOFIT - } - } -} - -\section{version 0.7.1}{ - \subsection{New features}{ - \itemize{ - \item this is a minor update necessary to maintain compatibility with spaMM 2.4 - } - } - \subsection{Geeky changes}{ - \itemize{ - \item the syntax for the extraction of correlation terms of spaMM objects has changed - } - } -} - -\section{version 0.7}{ - \subsection{New features}{ - \itemize{ - \item the calibration step is now optional, allowing for users to use an isoscape directly fitted on tissues instead of precipitation water - \item the function queryGNIP has been renamed and is now called prepdata, this function can also handle other datasets than GNIP - \item the function relevate has been modified to make crop possible around the pacific meridian -180/180 (but several issues remain to handle extra plot layers automatically) - } - } - \subsection{Geeky changes}{ - \itemize{ - \item an additional options as been added to prevent prompting during examples - \item new internal function .converts_months_to_numbers - } - } -} - -\section{version 0.6}{ - \subsection{New features}{ - \itemize{ - \item the maximum duration of running time for examples can now be controlled using IsoriX.options(example_maxtime = XX) - \item due to new 
GNIP policies, we no longer provide the GNIP dataset for the entire World, but only a subset containing data for Germany (users should thus compile their precipitatin data themselves from the 'wiser' plateform provided by GNIP; see vignette Workflow) - \item it is now possible to control the colours and labels for the levels of isotopes or p-values in plots - \item for plotting, it is no longer needed to load the ocean mask and country borders (it now happens automatically) - \item the function relevate now allows for a cropping larger than the extent of the weather stations by means of the argument margin_pct - \item it is now possible to create the so-called annual averaged precipitation isoscapes! - \item queryGNIP can now split the dataset per month or year at each location during the aggregation - \item new function prepcipitate to prepare the precipitation brick - \item new function getprecip to download monthly precipitation rasters from WorldClim - \item new function isomultifit fitting isoscapes per strata (month, year, or any "split") - \item new function isomultiscape building isoscapes averaged across strata - \item new function create_aliens simulating of organism data - } - } - \subsection{Minor changes}{ - \itemize{ - \item the inputs for filtering data by month or year using queryGNIP have changed - \item the default fixed effect structure for the mean model is isofit has changed - } - } - \subsection{Geeky changes}{ - \itemize{ - \item the namespace is now generated with Roxygen2 - \item the datasets are now 'lazy-loaded' - \item new vignette for coding conventions - \item changed some object names following our coding convention (more to come) - } - } -} - -\section{version 0.5}{ - \subsection{Solved BUGS}{ - \itemize{ - \item the package could not be detached and reloaded - \item the citation was not correct - \item the path in getelev was breaking in some cases - \item the title of the assignment plot was missing when a single individual was 
plotted - } - } - - \subsection{New feature(s)}{ - \itemize{ - \item new vignette explaining how to export spatial objects to GIS - \item the file GNIPdata has been updated and now contain 2014 data - \item names of all functions and objects have been refactored to remove upper cases - \item links to our GitHub directory have been added - \item new function downloadfile to download non standard elevation raster or any other file - \item function getelev can perform MD5 sum checks if the package 'tools' is installed - \item function getelev can display additional information during download if verbose > 1 - \item the column animalID in the assignment dataset can now handle names with spaces - \item added Codecov to track test coverage for the package - } - } - - \subsection{Minor changes}{ - \itemize{ - \item the modification of the option set_ll_warn from the 'sp' package has been moved to onLoad (instead of onAttached) and the original state is now restored while unloading 'IsoriX' - \item the Earth distance method has been moved to the package 'spaMM' - \item function getelev lost its 'address' argument as downloadfile should now be used to download non-standard elevation rasters - \item some typo fixed in documentation files - \item RandomFields moved to suggest - \item .Rd files for documentation are now generated with Roxygen2 - \item queryGNIP is now provided with a single month argument specifying the months to select - } - } -} - -\section{version 0.4-1}{ - \subsection{New feature(s)}{ - \itemize{ - \item this was the first version of IsoriX submitted to CRAN - } - } -} diff --git a/IsoriX/inst/WORDLIST b/IsoriX/inst/WORDLIST new file mode 100644 index 0000000..2601302 --- /dev/null +++ b/IsoriX/inst/WORDLIST @@ -0,0 +1,123 @@ +AWS +Beuneux +CALIBFIT +CMD +CalibDataAlien +CalibDataBat +Chun +Ciechanowski +Codecov +CountryBorders +DX +Ecography +ElevRasterDE +Ferdy +Fonderflick +GLMM +GNIP +GNIPDataALLagg +GNIPDataDE +GNIPDataEUagg +Goerfoel +Greif +ISOFIND 
+ISOFIT +ISOSCAPE +Ibanez +Isoscape +Juste +Kelm +LM +LMM +Lehnert +Lenhert +Lisseanu +Luckner +MULTIISOFIT +Matérn +Mbe +Myslajek +Niermann +Nyctalus +Oecologia +PLoS +Popa +RDA +RMA +RStudio +RandomFields +RasterBrick +Roemer +Rohwäder +Roxygen +Rproj +SEs +Schadt +Soergel +Soto +SpatRaster +SpatVector +SpatialPolygons +Stichler +Stumpp +Voigt +WGS +Wassenaar +WorldClim +asl +bioRxiv +bookdown +booleans +bzip +calibfit +colourspace +covPredCalib +dataThief +disp +doi +elev +elevatr +elipsis +geostatistical +getprecip +ggplot +graphClick +inMemory +isofind +isofit +isomultifit +isomultiscape +isoscape +isoscapes +isostopic +lat +levelplot +lomg +md +metaDigitse +noctula +noctule +pch +prepcipitate +prepraster +prepsources +pv +queryGNIP +rast +rasterVis +rgdal +sp +spaMM +stat +superseed +terra +testthat +th +tibble +tibbles +usethis +var +viridis +viridisLite +withr +xz diff --git a/IsoriX/inst/extdata/PrecipBrickDE.rds b/IsoriX/inst/extdata/PrecipBrickDE.rds new file mode 100644 index 0000000..3e2799f Binary files /dev/null and b/IsoriX/inst/extdata/PrecipBrickDE.rds differ diff --git a/IsoriX/man/AssignDataAlien.Rd b/IsoriX/man/AssignDataAlien.Rd index b2921bf..f23d5c2 100644 --- a/IsoriX/man/AssignDataAlien.Rd +++ b/IsoriX/man/AssignDataAlien.Rd @@ -11,7 +11,7 @@ A *dataframe* with 10 observations on 2 variables: [, 2] \tab sample_value \tab (*numeric*) \tab Hydrogen delta value of the tissue\cr} } \description{ -This dataset contains simulated hydrogen delta values. +This dataset contains simulated hydrogen delta values. The data can be used as an example to perform assignments using the function [isofind]. } \examples{ @@ -24,35 +24,37 @@ str(AssignDataAlien) ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 30) { +if (getOption_IsoriX("example_maxtime") > 30) { + ## The following describes how we created such dataset -## The following describes how we created such dataset + ### We prepare the precipitation data + GNIPDataDEagg <- prepsources(data = GNIPDataDE) -### We prepare the precipitation data -GNIPDataDEagg <- prepsources(data = GNIPDataDE) + ### We fit the models for Germany + GermanFit <- isofit(data = GNIPDataDEagg) -### We fit the models for Germany -GermanFit <- isofit(data = GNIPDataDEagg) + ### We build the isoscape + GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -### We build the isoscape -GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) - -### We create a simulated dataset with 1 site and 10 observations -set.seed(1L) -Aliens <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), - isoscape = GermanScape, - raster = ElevRasterDE, - coordinates = data.frame(site_ID = "Berlin", - long = 13.52134, - lat = 52.50598), - n_sites = 1, - min_n_samples = 10, - max_n_samples = 10) -AssignDataAlien <- Aliens[, c("sample_ID", "sample_value")] - -### Uncomment the following to store the file as we did -#save(AssignDataAlien, file = "AssignDataAlien.rda", compress = "xz") + ### We create a simulated dataset with 1 site and 10 observations + set.seed(1L) + Aliens <- create_aliens( + calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), + isoscape = GermanScape, + raster = ElevRasterDE, + coordinates = data.frame( + site_ID = "Berlin", + long = 13.52134, + lat = 52.50598 + ), + n_sites = 1, + min_n_samples = 10, + max_n_samples = 10 + ) + AssignDataAlien <- Aliens[, c("sample_ID", "sample_value")] + ### Uncomment the following to store the file as we did + # save(AssignDataAlien, file = "AssignDataAlien.rda", compress = "xz") } } diff --git a/IsoriX/man/AssignDataBat.Rd b/IsoriX/man/AssignDataBat.Rd index a281de6..2f2f444 100644 --- a/IsoriX/man/AssignDataBat.Rd 
+++ b/IsoriX/man/AssignDataBat.Rd @@ -28,7 +28,7 @@ str(AssignDataBat) } \references{ -Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial +Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial mammals using stable isotopes. In Hobson KA, Wassenaar LI (eds.), Tracking Animal Migration with Stable Isotopes, second edition. Academic Press, London. } diff --git a/IsoriX/man/CALIBFIT-class.Rd b/IsoriX/man/CALIBFIT-class.Rd new file mode 100644 index 0000000..060c016 --- /dev/null +++ b/IsoriX/man/CALIBFIT-class.Rd @@ -0,0 +1,33 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/S4classes.R +\docType{class} +\name{CALIBFIT-class} +\alias{CALIBFIT-class} +\title{Class CALIBFIT} +\description{ +Class CALIBFIT +} +\section{Slots}{ + +\describe{ +\item{\code{method}}{a character string indicating the method used for the calibration} + +\item{\code{species_rand}}{a logical indicating whether the species random effect is included in the model} + +\item{\code{site_rand}}{a logical indicating whether the site random effect is included in the model} + +\item{\code{param}}{the fixed-effect estimates of the calibration function} + +\item{\code{fixefCov}}{the covariance matrix of the fixed effects} + +\item{\code{phi}}{the residual variance of the calibration fit} + +\item{\code{calib_fit}}{the fitted calibration model (if applicable)} + +\item{\code{iso_fit}}{the fitted isoscape model used for the calibration (if applicable)} + +\item{\code{data}}{the calibration data} + +\item{\code{sp_points}}{a list of spatial points used for calibration} +}} + diff --git a/IsoriX/man/CalibDataAlien.Rd b/IsoriX/man/CalibDataAlien.Rd index c284966..b95dd67 100644 --- a/IsoriX/man/CalibDataAlien.Rd +++ b/IsoriX/man/CalibDataAlien.Rd @@ -16,15 +16,15 @@ A *dataframe* with x observations on 6 variables: } \description{ This dataset contains simulated hydrogen delta values for corresponding locations -based on an assumed linear relationship between the animal
tissue value and the +based on an assumed linear relationship between the animal tissue value and the hydrogen delta values in the environment. The data can be used as an example to fit a calibration model using the function [calibfit]. } \details{ Users who wish to use their own dataset for calibration should create a -*dataframe* of similar structure than this one. The columns should possess -the same names as the ones described above. If the elevation is unknown at the +*dataframe* of similar structure than this one. The columns should possess +the same names as the ones described above. If the elevation is unknown at the sampling sites, elevation information can be extracted from a high resolution elevation raster using the function [terra::extract]. In this dataset, we retrieved elevations from the Global Multi-resolution Terrain Elevation Data @@ -40,36 +40,35 @@ str(CalibDataAlien) ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 30) { +if (getOption_IsoriX("example_maxtime") > 30) { + ## We prepare the precipitation data + GNIPDataDEagg <- prepsources(data = GNIPDataDE) -## We prepare the precipitation data -GNIPDataDEagg <- prepsources(data = GNIPDataDE) + ## We fit the models for Germany + GermanFit <- isofit(data = GNIPDataDEagg) -## We fit the models for Germany -GermanFit <- isofit(data = GNIPDataDEagg) + ## We build the isoscape + GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -## We build the isoscape -GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) + ## We create a simulated dataset with 50 site and 10 observations per site + set.seed(2L) + CalibDataAlien <- create_aliens( + calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), + isoscape = GermanScape, + raster = ElevRasterDE, + n_sites = 50, + min_n_samples = 10, + max_n_samples = 10 + ) + plot(sample_value ~ source_value, data = CalibDataAlien) + abline(3, 0.5) -## We create a simulated dataset with 50 site and 10 observations per site -set.seed(2L) -CalibDataAlien <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), - isoscape = GermanScape, - raster = ElevRasterDE, - n_sites = 50, - min_n_samples = 10, - max_n_samples = 10) -plot(sample_value ~ source_value, data = CalibDataAlien) -abline(3, 0.5) - -CalibDataAlien$source_value <- NULL - -## Uncomment the following to store the file as we did -#save(CalibDataAlien, file = "CalibDataAlien.rda", compress = "xz") + CalibDataAlien$source_value <- NULL + ## Uncomment the following to store the file as we did + # save(CalibDataAlien, file = "CalibDataAlien.rda", compress = "xz") } - } \seealso{ [calibfit] to fit a calibration model diff --git a/IsoriX/man/CalibDataBat.Rd b/IsoriX/man/CalibDataBat.Rd index 4102cb5..ccfa8bf 100644 --- a/IsoriX/man/CalibDataBat.Rd +++ b/IsoriX/man/CalibDataBat.Rd @@ -37,7 +37,7 @@ head(CalibDataBat) str(CalibDataBat) } \references{ -Voigt CC & 
Lehnert L (2019). Tracking of movements of terrestrial +Voigt CC & Lehnert L (2019). Tracking of movements of terrestrial mammals using stable isotopes. In Hobson KA, Wassenaar LI (eds.), Tracking Animal Migration with Stable Isotopes, second edition. Academic Press, London. diff --git a/IsoriX/man/CalibDataBat2.Rd b/IsoriX/man/CalibDataBat2.Rd index eeebcb4..bb90148 100644 --- a/IsoriX/man/CalibDataBat2.Rd +++ b/IsoriX/man/CalibDataBat2.Rd @@ -41,7 +41,7 @@ str(CalibDataBat2) ## an elevation raster with the function getelev() ## and will therefore not run unless you uncomment it -#if (require(terra)){ +# if (require(terra)){ # ## We delete the elevation data # CalibDataBat2$elev <- NULL # @@ -56,7 +56,7 @@ str(CalibDataBat2) # ElevationRasterBig, # cbind(CalibDataBat2$long, CalibDataBat2$lat)) # head(CalibDataBat2) -#} +# } } \references{ diff --git a/IsoriX/man/CountryBorders.Rd b/IsoriX/man/CountryBorders.Rd index 2877dfe..17fca59 100644 --- a/IsoriX/man/CountryBorders.Rd +++ b/IsoriX/man/CountryBorders.Rd @@ -19,7 +19,7 @@ It can be used to draw the borders of world countries. } \examples{ -plot(CountryBorders, border="red", col="darkgrey") +plot(CountryBorders, border = "red", col = "darkgrey") ## How did we create this file? 
diff --git a/IsoriX/man/ElevRasterDE.Rd b/IsoriX/man/ElevRasterDE.Rd index d1ac429..c892cfa 100644 --- a/IsoriX/man/ElevRasterDE.Rd +++ b/IsoriX/man/ElevRasterDE.Rd @@ -33,7 +33,7 @@ median(values(cellSize(ElevRasterDE, unit = "km"))) # x = c(5.5, 15.5), y = c(47, 55.5)), # prj = "+proj=longlat +datum=WGS84 +no_defs", # clip = "bbox", z = 3) -# +# # ElevRasterDE <- terra::rast(ElevRasterDE) @@ -43,11 +43,10 @@ median(values(cellSize(ElevRasterDE, unit = "km"))) # getelev(file = "~/ElevRasterDE.tif", # z = 3, # long_min = 5.5, long_max = 15.5, lat_min = 47, lat_max = 55.5) - + # ## Convert the tif into R raster format # ElevRasterDE <- rast('~/ElevRasterDE.tif') - } \seealso{ \link{prepraster} to crop and/or aggregate this raster diff --git a/IsoriX/man/GNIPDataDE.Rd b/IsoriX/man/GNIPDataDE.Rd index 951259a..ca9d3d5 100644 --- a/IsoriX/man/GNIPDataDE.Rd +++ b/IsoriX/man/GNIPDataDE.Rd @@ -30,7 +30,7 @@ provide larger GNIP dataset in the package as those are not free to reuse (but we do provide aggregated versions of it; see [GNIPDataEUagg]). You can still download the complete GNIP dataset for free, but you will have to proceed to a registration process with GNIP and use their downloading -interface WISER (\url{http://www-naweb.iaea.org/napc/ih/IHS_resources_isohis.html}). +interface WISER (\url{https://nucleus.iaea.org/wiser/index.aspx}). } \details{ The dataset contains non-aggregated data for 27 weather stations across Germany. diff --git a/IsoriX/man/GNIPDataEUagg.Rd b/IsoriX/man/GNIPDataEUagg.Rd index d0b44a0..da2edce 100644 --- a/IsoriX/man/GNIPDataEUagg.Rd +++ b/IsoriX/man/GNIPDataEUagg.Rd @@ -31,7 +31,7 @@ non-aggregate GNIP dataset in the package as it is not free to reuse. You can still download the complete GNIP dataset for free, but you will have to proceed to a registration process with GNIP and use their downloading interface WISER -(\url{http://www-naweb.iaea.org/napc/ih/IHS_resources_isohis.html}). +(\url{https://nucleus.iaea.org/wiser/index.aspx}). 
} \details{ These datasets have been aggregated and can thus be directly used for fitting diff --git a/IsoriX/man/ISOFIND-class.Rd b/IsoriX/man/ISOFIND-class.Rd new file mode 100644 index 0000000..78e8a45 --- /dev/null +++ b/IsoriX/man/ISOFIND-class.Rd @@ -0,0 +1,19 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/S4classes.R +\docType{class} +\name{ISOFIND-class} +\alias{ISOFIND-class} +\title{Class ISOFIND} +\description{ +Class ISOFIND +} +\section{Slots}{ + +\describe{ +\item{\code{sample}}{a list of SpatRaster objects storing the assignment info for each sample} + +\item{\code{group}}{a SpatRaster storing the group assignment info} + +\item{\code{sp_points}}{a list of SpatVector storing the spatial points for sources, calibration and assignment samples} +}} + diff --git a/IsoriX/man/ISOSCAPE-class.Rd b/IsoriX/man/ISOSCAPE-class.Rd new file mode 100644 index 0000000..e89615c --- /dev/null +++ b/IsoriX/man/ISOSCAPE-class.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/S4classes.R +\docType{class} +\name{ISOSCAPE-class} +\alias{ISOSCAPE-class} +\title{Class ISOSCAPE} +\description{ +Class ISOSCAPE +} +\section{Slots}{ + +\describe{ +\item{\code{isoscapes}}{a SpatRaster storing the isoscapes} + +\item{\code{sp_points}}{a list of spatial points} +}} + diff --git a/IsoriX/man/OceanMask.Rd b/IsoriX/man/OceanMask.Rd index 84b372b..a1087cc 100644 --- a/IsoriX/man/OceanMask.Rd +++ b/IsoriX/man/OceanMask.Rd @@ -16,7 +16,7 @@ It can be used to mask large bodies of water. } \examples{ -plot(OceanMask, col='blue') +plot(OceanMask, col = "blue") ## How did we create this file? 
@@ -28,7 +28,6 @@ plot(OceanMask, col='blue') # #saveRDS(OceanMask, file = "IsoriX/inst/extdata/OceanMask.rds", compress = "xz") # } - } \seealso{ \itemize{ diff --git a/IsoriX/man/PrecipBrickDE.Rd b/IsoriX/man/PrecipBrickDE.Rd index 92dae4e..5794f43 100644 --- a/IsoriX/man/PrecipBrickDE.Rd +++ b/IsoriX/man/PrecipBrickDE.Rd @@ -27,9 +27,9 @@ be downloaded using the function \link{getprecip}. ## How did we create this file? ## Uncomment the following to create the file as we did -#getprecip() ## Download the tif files (~ 1 Gb compressed) -#PrecipBrickDE <- prepcipitate(raster = ElevRasterDE) -#save(PrecipBrickDE, file = "PrecipBrickDE", compress = "xz") +# getprecip() ## Download the tif files (~ 1 Gb compressed) +# PrecipBrickDE <- prepcipitate(raster = ElevRasterDE) +# terra::saveRDS(PrecipBrickDE, file = "PrecipBrickDE.rds", compress = "xz") } \seealso{ diff --git a/IsoriX/man/calibfit.Rd b/IsoriX/man/calibfit.Rd index 2918ebb..84ad5ff 100644 --- a/IsoriX/man/calibfit.Rd +++ b/IsoriX/man/calibfit.Rd @@ -258,120 +258,136 @@ for the calibration fit. This is required for pivoting the regression from ## (so don't write XX but put a number instead!) 
if (getOption_IsoriX("example_maxtime") > 30) { - -##################################################### -## 1 Example of calibration using the method "wild" # -##################################################### - -## 1.1 We prepare the data to fit the isoscape: -GNIPDataDEagg <- prepsources(data = GNIPDataDE) - -## 1.2 We fit the isoscape models for Germany: -GermanFit <- isofit(data = GNIPDataDEagg, - mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) - -## 1.3 We fit the calibration model using the method "wild" (the default): -CalibAlien <- calibfit(data = CalibDataAlien, isofit = GermanFit) - -## 1.4 We explore the outcome of the calibration: -CalibAlien -summary(CalibAlien) -plot(CalibAlien) - -## Note 1: you can plot several calibrations at once (using bats this time): -CalibBat1 <- calibfit(data = CalibDataBat, isofit = GermanFit) -CalibBat2 <- calibfit(data = CalibDataBat2, isofit = GermanFit) -plot(CalibBat1) -points(CalibBat2, pch = 3, col = "red", CI = list(col = "green")) - -## Note 2: you can extract data created by plot() for plotting things yourself: -dataplot <- plot(CalibAlien, plot = FALSE) -plot(sample_fitted ~ source_value, data = dataplot, - xlim = range(dataplot$source_value), - ylim = range(dataplot$sample_lwr, dataplot$sample_upr), col = NULL) -polygon(x = c(dataplot$source_value, rev(dataplot$source_value)), - y = c(dataplot$sample_lwr, rev(dataplot$sample_upr)), - col = 3) -points(sample_fitted ~ source_value, data = dataplot, type = "l", lty = 2) - - -#################################################### -## 2 Example of calibration using the method "lab" # -#################################################### - -## 2.0 We create made up data here because we don't have yet a good dataset -## for this case, but you should use your own data instead: -GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) -set.seed(123) -CalibDataAlien2 <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, - resid_var = 5), - isoscape 
= GermanScape, - raster = ElevRasterDE, - n_sites = 25, - min_n_samples = 5, - max_n_samples = 5) -CalibDataAlien2 <- CalibDataAlien2[, c("site_ID", "sample_ID", "source_value", - "sample_value")] -head(CalibDataAlien2) ## your data should have this structure - -## 2.1 We fit the calibration model using the method "lab": -CalibAlien2 <- calibfit(data = CalibDataAlien2, method = "lab") - -## 2.2 We explore the outcome of the calibration: -CalibAlien2 -summary(CalibAlien2) -plot(CalibAlien2) - - -##################################################### -## 3 Example of calibration using the method "desk" # -##################################################### - -## 3.1 We format the information about the calibration function to be used -## as a dataframe: -CalibDataAlien3 <- data.frame(intercept = 1.67, slope = 0.48, - intercept_se = 1.65, slope_se = 0.03, - resid_var = 3.96) -CalibDataAlien3 - -## 3.2 We fit the calibration model using the method "desk": -CalibAlien3 <- calibfit(data = CalibDataAlien3, method = "desk") - -## 3.3 We explore the outcome of the calibration: -CalibAlien3 -summary(CalibAlien3) -plot(CalibAlien3, xlim = c(-100, 100), ylim = c(-50, 50)) - -## Note: the desk function also work with just intercept and slope: -CalibDataAlien4 <- CalibDataAlien3[, c("intercept", "slope")] -CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk") -CalibAlien4 -summary(CalibAlien4) -plot(CalibAlien3, xlim = c(-100, 100), ylim = c(-50, 50)) -points(CalibAlien4, line = list(col = "orange")) -## Regression lines are the same, but the new calibration does not have a -## confidence intervals since we provided no uncertainty measure in -## CalibDataAlien4, which will make a difference during assignments... 
- - -############################################################# -## 4 Example of calibration using the method "desk_inverse" # -############################################################# - -## 4.1 We format the information about the calibration function to be used -## as a dataframe: -CalibDataAlien4 <- data.frame(intercept = -16.98822, slope = 1.588885, - intercept_se = 2.200435, slope_se = 0.08106032, - resid_var = 13.15102, N = 125, sign_mean_Y = -1) -CalibDataAlien4 - -## 4.2 We fit the calibration model using the method "desk_inverse": -CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk_inverse") - -## 4.3 We explore the outcome of the calibration: -CalibAlien4 -summary(CalibAlien4) -plot(CalibAlien4, xlim = c(-100, 100), ylim = c(-50, 50)) + ##################################################### + ## 1 Example of calibration using the method "wild" # + ##################################################### + + ## 1.1 We prepare the data to fit the isoscape: + GNIPDataDEagg <- prepsources(data = GNIPDataDE) + + ## 1.2 We fit the isoscape models for Germany: + GermanFit <- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = TRUE, lat_abs = TRUE) + ) + + ## 1.3 We fit the calibration model using the method "wild" (the default): + CalibAlien <- calibfit(data = CalibDataAlien, isofit = GermanFit) + + ## 1.4 We explore the outcome of the calibration: + CalibAlien + summary(CalibAlien) + plot(CalibAlien) + + ## Note 1: you can plot several calibrations at once (using bats this time): + CalibBat1 <- calibfit(data = CalibDataBat, isofit = GermanFit) + CalibBat2 <- calibfit(data = CalibDataBat2, isofit = GermanFit) + plot(CalibBat1) + points(CalibBat2, pch = 3, col = "red", CI = list(col = "green")) + + ## Note 2: you can extract data created by plot() + ## for plotting things yourself: + dataplot <- plot(CalibAlien, plot = FALSE) + plot(sample_fitted ~ source_value, + data = dataplot, + xlim = range(dataplot$source_value), + ylim = 
range(dataplot$sample_lwr, dataplot$sample_upr), col = NULL + ) + polygon( + x = c(dataplot$source_value, rev(dataplot$source_value)), + y = c(dataplot$sample_lwr, rev(dataplot$sample_upr)), + col = 3 + ) + points(sample_fitted ~ source_value, data = dataplot, type = "l", lty = 2) + + + #################################################### + ## 2 Example of calibration using the method "lab" # + #################################################### + + ## 2.0 We create made up data here because we don't have yet a good dataset + ## for this case, but you should use your own data instead: + GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) + set.seed(123) + CalibDataAlien2 <- create_aliens( + calib_fn = list( + intercept = 3, slope = 0.5, + resid_var = 5 + ), + isoscape = GermanScape, + raster = ElevRasterDE, + n_sites = 25, + min_n_samples = 5, + max_n_samples = 5 + ) + CalibDataAlien2 <- CalibDataAlien2[, c( + "site_ID", "sample_ID", + "source_value", "sample_value" + )] + head(CalibDataAlien2) ## your data should have this structure + + ## 2.1 We fit the calibration model using the method "lab": + CalibAlien2 <- calibfit(data = CalibDataAlien2, method = "lab") + + ## 2.2 We explore the outcome of the calibration: + CalibAlien2 + summary(CalibAlien2) + plot(CalibAlien2) + + + ##################################################### + ## 3 Example of calibration using the method "desk" # + ##################################################### + + ## 3.1 We format the information about the calibration function to be used + ## as a dataframe: + CalibDataAlien3 <- data.frame( + intercept = 1.67, slope = 0.48, + intercept_se = 1.65, slope_se = 0.03, + resid_var = 3.96 + ) + CalibDataAlien3 + + ## 3.2 We fit the calibration model using the method "desk": + CalibAlien3 <- calibfit(data = CalibDataAlien3, method = "desk") + + ## 3.3 We explore the outcome of the calibration: + CalibAlien3 + summary(CalibAlien3) + plot(CalibAlien3, xlim = c(-100, 100), ylim = 
c(-50, 50)) + + ## Note: the desk function also works with just intercept and slope: + CalibDataAlien4 <- CalibDataAlien3[, c("intercept", "slope")] + CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk") + CalibAlien4 + summary(CalibAlien4) + plot(CalibAlien3, xlim = c(-100, 100), ylim = c(-50, 50)) + points(CalibAlien4, line = list(col = "orange")) + ## Regression lines are the same, but the new calibration does not have a + ## confidence interval since we provided no uncertainty measure in + ## CalibDataAlien4, which will make a difference during assignments... + + + ############################################################# + ## 4 Example of calibration using the method "desk_inverse" # + ############################################################# + + ## 4.1 We format the information about the calibration function to be used + ## as a dataframe: + CalibDataAlien4 <- data.frame( + intercept = -16.98822, slope = 1.588885, + intercept_se = 2.200435, slope_se = 0.08106032, + resid_var = 13.15102, N = 125, sign_mean_Y = -1 + ) + CalibDataAlien4 + + ## 4.2 We fit the calibration model using the method "desk_inverse": + CalibAlien4 <- calibfit(data = CalibDataAlien4, method = "desk_inverse") + + ## 4.3 We explore the outcome of the calibration: + CalibAlien4 + summary(CalibAlien4) + plot(CalibAlien4, xlim = c(-100, 100), ylim = c(-50, 50)) } } diff --git a/IsoriX/man/create_aliens.Rd b/IsoriX/man/create_aliens.Rd index 7467f22..5aef4d1 100644 --- a/IsoriX/man/create_aliens.Rd +++ b/IsoriX/man/create_aliens.Rd @@ -59,7 +59,7 @@ provided to this function. An alternative possibility is to provide a data frame containing three columns (\code{site_ID}, \code{long} and \code{lat}) to input the coordinate of the sampling site manually.
-Irrespectively of how locations are chosen, a random number of observations +Irrespective of how locations are chosen, a random number of observations will be drawn, at each site, according to a uniform distribution bounded by the values of the argument \code{min_n_samples} and \code{max_n_samples}. @@ -78,44 +78,48 @@ centred around such mean and a variance defined by the residual variance ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) -if(getOption_IsoriX("example_maxtime") > 30) { - -## We fit the models for Germany -GNIPDataDEagg <- prepsources(data = GNIPDataDE) - -GermanFit <- isofit(data = GNIPDataDEagg) - -## We build the isoscapes -GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) - -## We create a simulated dataset with 25 sites and 5 observations per site -Aliens <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), - isoscape = GermanScape, - raster = ElevRasterDE, - n_sites = 25, - min_n_samples = 5, - max_n_samples = 5) - -## We display the simulated dataset -Aliens - -## We plot the relationship between the environmental isotope values -## and those from the simulated organisms -plot(sample_value ~ source_value, data = Aliens, ylab = "Tissue", xlab = "Environment") -abline(3, 0.5, col = "blue") ## the true relationship - -## We create a simulated dataset with 2 sites imputing coordinates manually -Aliens2 <- create_aliens(calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), - isoscape = GermanScape, - coordinates = data.frame(site_ID = c("Berlin", "Bielefeld"), - long = c(13.52134, 8.49914), - lat = c(52.50598, 52.03485)), - raster = ElevRasterDE, - min_n_samples = 5, - max_n_samples = 5) - -Aliens2 - +if (getOption_IsoriX("example_maxtime") > 30) { + ## We fit the models for Germany + GNIPDataDEagg <- prepsources(data = GNIPDataDE) + + GermanFit <- isofit(data = GNIPDataDEagg) + + ## We build the isoscapes + GermanScape <- 
isoscape(raster = ElevRasterDE, isofit = GermanFit) + + ## We create a simulated dataset with 25 sites and 5 observations per site + Aliens <- create_aliens( + calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), + isoscape = GermanScape, + raster = ElevRasterDE, + n_sites = 25, + min_n_samples = 5, + max_n_samples = 5 + ) + + ## We display the simulated dataset + Aliens + + ## We plot the relationship between the environmental isotope values + ## and those from the simulated organisms + plot(sample_value ~ source_value, data = Aliens, ylab = "Tissue", xlab = "Environment") + abline(3, 0.5, col = "blue") ## the true relationship + + ## We create a simulated dataset with 2 sites imputing coordinates manually + Aliens2 <- create_aliens( + calib_fn = list(intercept = 3, slope = 0.5, resid_var = 5), + isoscape = GermanScape, + coordinates = data.frame( + site_ID = c("Berlin", "Bielefeld"), + long = c(13.52134, 8.49914), + lat = c(52.50598, 52.03485) + ), + raster = ElevRasterDE, + min_n_samples = 5, + max_n_samples = 5 + ) + + Aliens2 } } diff --git a/IsoriX/man/getprecip.Rd b/IsoriX/man/getprecip.Rd index e4feebb..b9f9b33 100644 --- a/IsoriX/man/getprecip.Rd +++ b/IsoriX/man/getprecip.Rd @@ -50,6 +50,6 @@ file again, specifying overwrite = TRUE to overwrite the corrupted file. ## To download the monthly precipitation ## in your current working ## directory, just type: -## getprecip() +## getprecip(path = "~/Downloads/") ## Mind that the file weights ca. 1GB! } diff --git a/IsoriX/man/isofind.Rd b/IsoriX/man/isofind.Rd index d78fbda..3c9e370 100644 --- a/IsoriX/man/isofind.Rd +++ b/IsoriX/man/isofind.Rd @@ -102,71 +102,85 @@ perform the assignment and their names. ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 200) { - -## We fit the models for Germany -GNIPDataDEagg <- prepsources(data = GNIPDataDE) - -GermanFit <- isofit(data = GNIPDataDEagg, - mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) - - -## We build the isoscape -GermanScape <- isoscape(raster = ElevRasterDE, - isofit = GermanFit) - - -## We fit the calibration model -CalibAlien <- calibfit(data = CalibDataAlien, - isofit = GermanFit) - -## We perform the assignment on land only -AssignmentDry <- isofind(data = AssignDataAlien, - isoscape = GermanScape, - calibfit = CalibAlien) - -## perform the assignment on land and water -Assignment <- isofind(data = AssignDataAlien, - isoscape = GermanScape, - calibfit = CalibAlien, - mask = NULL) - -## We plot the group assignment -plot(Assignment, who = "group", mask = list(mask = NULL)) - -plot(AssignmentDry, who = "group", mask = list(mask = NULL)) - -## We plot the assignment for the 8 first samples -plot(AssignmentDry, who = 1:8, - sources = list(draw = FALSE), - calibs = list(draw = FALSE)) - -## We plot the assignment for the sample "Alien_10" -plot(AssignmentDry, who = "Alien_10") - - -### Other example without calibration: -### We will try to assign a weather station -### in the water isoscape - -## We create the assignment data taking -## GARMISCH-PARTENKIRCHEN as the station to assign -GPIso <- GNIPDataDEagg[GNIPDataDEagg$source_ID == "GARMISCH-PARTENKIRCHEN", "mean_source_value"] -AssignDataGP <- data.frame(sample_value = GPIso, - sample_ID = "GARMISCH-PARTENKIRCHEN") - -## We perform the assignment -AssignedGP <- isofind(data = AssignDataGP, - isoscape = GermanScape, - calibfit = NULL) -## We plot the assignment and -## show where the station really is (using lattice) -plot(AssignedGP) + - xyplot(47.48 ~ 11.06, - panel = panel.points, - cex = 5, pch = 13, lwd = 2, col = "black") - - +if (getOption_IsoriX("example_maxtime") > 200) { + ## We fit the models for Germany + GNIPDataDEagg <- prepsources(data = 
GNIPDataDE) + + GermanFit <- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = TRUE, lat_abs = TRUE) + ) + + + ## We build the isoscape + GermanScape <- isoscape( + raster = ElevRasterDE, + isofit = GermanFit + ) + + + ## We fit the calibration model + CalibAlien <- calibfit( + data = CalibDataAlien, + isofit = GermanFit + ) + + ## We perform the assignment on land only + AssignmentDry <- isofind( + data = AssignDataAlien, + isoscape = GermanScape, + calibfit = CalibAlien + ) + + ## perform the assignment on land and water + Assignment <- isofind( + data = AssignDataAlien, + isoscape = GermanScape, + calibfit = CalibAlien, + mask = NULL + ) + + ## We plot the group assignment + plot(Assignment, who = "group", mask = list(mask = NULL)) + + plot(AssignmentDry, who = "group", mask = list(mask = NULL)) + + ## We plot the assignment for the 8 first samples + plot(AssignmentDry, + who = 1:8, + sources = list(draw = FALSE), + calibs = list(draw = FALSE) + ) + + ## We plot the assignment for the sample "Alien_10" + plot(AssignmentDry, who = "Alien_10") + + + ### Other example without calibration: + ### We will try to assign a weather station + ### in the water isoscape + + ## We create the assignment data taking + ## GARMISCH-PARTENKIRCHEN as the station to assign + GPIso <- GNIPDataDEagg[GNIPDataDEagg$source_ID == "GARMISCH-PARTENKIRCHEN", "mean_source_value"] + AssignDataGP <- data.frame( + sample_value = GPIso, + sample_ID = "GARMISCH-PARTENKIRCHEN" + ) + + ## We perform the assignment + AssignedGP <- isofind( + data = AssignDataGP, + isoscape = GermanScape, + calibfit = NULL + ) + ## We plot the assignment and + ## show where the station really is (using lattice) + plot(AssignedGP) + + xyplot(47.48 ~ 11.06, + panel = panel.points, + cex = 5, pch = 13, lwd = 2, col = "black" + ) } } diff --git a/IsoriX/man/isofit.Rd b/IsoriX/man/isofit.Rd index 02e78d6..787819f 100644 --- a/IsoriX/man/isofit.Rd +++ b/IsoriX/man/isofit.Rd @@ -180,7 +180,7 @@ Never use a 
mean_fit object to draw predictions without considering a disp_fit object: mean_fit is not fitted independently from disp_fit. For all methods, fixed effects are being estimated by Maximum Likelihood -(ML) and dispersion parameters (i.e. random effects and Matern correlation +(ML) and dispersion parameters (i.e. random effects and Matérn correlation parameters) are estimated by Restricted Maximum Likelihood (REML). Using REML provides more accurate prediction intervals but impedes the accuracy of Likelihood Ratio Tests (LRT). Our choice for REML was motivated by the @@ -203,23 +203,21 @@ other covariates would be useful to add in IsoriX. ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) -if(getOption_IsoriX("example_maxtime") > 10) { +if (getOption_IsoriX("example_maxtime") > 10) { + ## Fitting the models for Germany + GNIPDataDEagg <- prepsources(data = GNIPDataDE) -## Fitting the models for Germany -GNIPDataDEagg <- prepsources(data = GNIPDataDE) + GermanFit <- isofit(data = GNIPDataDEagg, mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) -GermanFit <- isofit(data = GNIPDataDEagg, mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) + GermanFit -GermanFit - -## Diagnostics for the fits -plot(GermanFit) - -## Exploration of the fitted models -GermanFit$mean_fit -GermanFit$disp_fit -AIC(GermanFit$disp_fit) + ## Diagnostics for the fits + plot(GermanFit) + ## Exploration of the fitted models + GermanFit$mean_fit + GermanFit$disp_fit + AIC(GermanFit$disp_fit) } } diff --git a/IsoriX/man/isomultifit.Rd b/IsoriX/man/isomultifit.Rd index 0f6af41..d1171f2 100644 --- a/IsoriX/man/isomultifit.Rd +++ b/IsoriX/man/isomultifit.Rd @@ -82,23 +82,24 @@ This function is a wrapper around the function \link{isofit}. ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 30) { +if (getOption_IsoriX("example_maxtime") > 30) { + ## We prepare the GNIP monthly data between January and June for Germany -## We prepare the GNIP monthly data between January and June for Germany + GNIPDataDEmonthly <- prepsources( + data = GNIPDataDE, + month = 1:6, + split_by = "month" + ) -GNIPDataDEmonthly <- prepsources(data = GNIPDataDE, - month = 1:6, - split_by = "month") + head(GNIPDataDEmonthly) -head(GNIPDataDEmonthly) + ## We fit the isoscapes -## We fit the isoscapes + GermanMonthlyFit <- isomultifit(data = GNIPDataDEmonthly) -GermanMonthlyFit <- isomultifit(data = GNIPDataDEmonthly) + GermanMonthlyFit -GermanMonthlyFit - -plot(GermanMonthlyFit) + plot(GermanMonthlyFit) } } \seealso{ diff --git a/IsoriX/man/isomultiscape.Rd b/IsoriX/man/isomultiscape.Rd index 77a34a5..4bb970a 100644 --- a/IsoriX/man/isomultiscape.Rd +++ b/IsoriX/man/isomultiscape.Rd @@ -40,41 +40,49 @@ averages precipitation weighted isoscapes. ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 180) { - -## We prepare the data and split them by month: - -GNIPDataDEmonthly <- prepsources(data = GNIPDataDE, - split_by = "month") - -dim(GNIPDataDEmonthly) - -## We fit the isoscapes:#' -GermanMultiFit <- isomultifit(data = GNIPDataDEmonthly, - mean_model_fix = list(elev = TRUE, lat.abs = TRUE)) - -## We build the annual isoscapes by simple averaging (equal weighting): -GermanMultiscape <- isomultiscape(raster = ElevRasterDE, - isofit = GermanMultiFit) - -## We build the annual isoscapes with a weighing based on precipitation amount: -GermanMultiscapeWeighted <- isomultiscape(raster = ElevRasterDE, - isofit = GermanMultiFit, - weighting = PrecipBrickDE) - -## We plot the mean isoscape of the averaging with equal weighting: -plot(x = GermanMultiscape, which = "mean") - -## We plot the mean isoscape of the averaging with precipitation weighting: -plot(x = GermanMultiscapeWeighted, which = "mean") - -## We build the isoscapes for a given month (here January): -GermanScapeJan <- isoscape(raster = ElevRasterDE, - isofit = GermanMultiFit$multi_fits[["month_1"]]) - -## We plot the mean isoscape for January: -plot(x = GermanScapeJan, which = "mean") - +if (getOption_IsoriX("example_maxtime") > 180) { + ## We prepare the data and split them by month: + + GNIPDataDEmonthly <- prepsources( + data = GNIPDataDE, + split_by = "month" + ) + + dim(GNIPDataDEmonthly) + + ## We fit the isoscapes: + GermanMultiFit <- isomultifit( + data = GNIPDataDEmonthly, + mean_model_fix = list(elev = TRUE, lat_abs = TRUE) + ) + + ## We build the annual isoscapes by simple averaging (equal weighting): + GermanMultiscape <- isomultiscape( + raster = ElevRasterDE, + isofit = GermanMultiFit + ) + + ## We build the annual isoscapes with a weighting based on precipitation amount: + GermanMultiscapeWeighted <- isomultiscape( + raster = ElevRasterDE, + isofit = GermanMultiFit, + weighting = PrecipBrickDE + ) + + ## We plot the mean isoscape of the averaging
with equal weighting: + plot(x = GermanMultiscape, which = "mean") + + ## We plot the mean isoscape of the averaging with precipitation weighting: + plot(x = GermanMultiscapeWeighted, which = "mean") + + ## We build the isoscapes for a given month (here January): + GermanScapeJan <- isoscape( + raster = ElevRasterDE, + isofit = GermanMultiFit$multi_fits[["month_1"]] + ) + + ## We plot the mean isoscape for January: + plot(x = GermanScapeJan, which = "mean") } } \seealso{ diff --git a/IsoriX/man/isopalette2.Rd b/IsoriX/man/isopalette2.Rd index d8c002d..51aab1a 100644 --- a/IsoriX/man/isopalette2.Rd +++ b/IsoriX/man/isopalette2.Rd @@ -41,37 +41,55 @@ lattice graphic environment (see source for details). ## A comparison of some colour palette par(mfrow = c(2, 3)) -pie(rep(1, length(isopalette1)), col = isopalette1, - border = NA, labels = NA, clockwise = TRUE, main = "isopalette1") -pie(rep(1, length(isopalette2)), col = isopalette2, - border = NA, labels = NA, clockwise = TRUE, main = "isopalette2") -pie(rep(1, 100), col = terrain.colors(100), border = NA, labels = NA, - clockwise = TRUE, main = "terrain.colors") -pie(rep(1, 100), col = rainbow(100), border = NA, labels = NA, - clockwise = TRUE, main = "rainbow") -pie(rep(1, 100), col = topo.colors(100), border = NA, labels = NA, - clockwise = TRUE, main = "topo.colors") -pie(rep(1, 100), col = heat.colors(100), border = NA, labels = NA, - clockwise = TRUE, main = "heat.colors") +pie(rep(1, length(isopalette1)), + col = isopalette1, + border = NA, labels = NA, clockwise = TRUE, main = "isopalette1" +) +pie(rep(1, length(isopalette2)), + col = isopalette2, + border = NA, labels = NA, clockwise = TRUE, main = "isopalette2" +) +pie(rep(1, 100), + col = terrain.colors(100), border = NA, labels = NA, + clockwise = TRUE, main = "terrain.colors" +) +pie(rep(1, 100), + col = rainbow(100), border = NA, labels = NA, + clockwise = TRUE, main = "rainbow" +) +pie(rep(1, 100), + col = topo.colors(100), border = NA, labels = NA, + 
clockwise = TRUE, main = "topo.colors" +) +pie(rep(1, 100), + col = heat.colors(100), border = NA, labels = NA, + clockwise = TRUE, main = "heat.colors" +) ## Creating your own colour palette MyPalette <- colorRampPalette(c("blue", "green", "red"), bias = 0.7) par(mfrow = c(1, 1)) -pie(1:100, col = MyPalette(100), border = NA, labels = NA, - clockwise = TRUE, main = "a home-made palette") +pie(1:100, + col = MyPalette(100), border = NA, labels = NA, + clockwise = TRUE, main = "a home-made palette" +) ## Turing palettes into functions for use in IsoriX Isopalette1Fn <- colorRampPalette(isopalette1, bias = 0.5) Isopalette2Fn <- colorRampPalette(isopalette2, bias = 0.5) par(mfrow = c(1, 2)) -pie(1:100, col = Isopalette1Fn(100), border = NA, labels = NA, - clockwise = TRUE, main = "isopalette1") -pie(1:100, col = Isopalette2Fn(100), border = NA, labels = NA, - clockwise = TRUE, main = "isopalette2") +pie(1:100, + col = Isopalette1Fn(100), border = NA, labels = NA, + clockwise = TRUE, main = "isopalette1" +) +pie(1:100, + col = Isopalette2Fn(100), border = NA, labels = NA, + clockwise = TRUE, main = "isopalette2" +) } \seealso{ -\link[grDevices:palettes]{grDevices::rainbow} for information about R colour palettes,#' +\link[grDevices:palettes]{grDevices::rainbow} for information about R colour palettes, \link[grDevices:colorRamp]{grDevices::colorRamp} and \link[colorspace:choose_palette]{colorspace::choose_palette} to create your own palettes } diff --git a/IsoriX/man/isoscape.Rd b/IsoriX/man/isoscape.Rd index 89894d0..291d812 100644 --- a/IsoriX/man/isoscape.Rd +++ b/IsoriX/man/isoscape.Rd @@ -80,50 +80,50 @@ The isoscape can be plotted using the function \link{plot.ISOSCAPE} ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 30) { - -## We prepare the data -GNIPDataDEagg <- prepsources(data = GNIPDataDE) - -## We fit the models -GermanFit <- isofit(data = GNIPDataDEagg, - mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) - -## We build the isoscapes -GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) - -GermanScape -plot(GermanScape) - -## We build more plots -PlotMean <- plot(x = GermanScape, which = "mean", plot = FALSE) - -PlotMeanPredVar <- plot(x = GermanScape, which = "mean_predVar", plot = FALSE) - -PlotMeanResidVar <- plot(x = GermanScape, which = "mean_residVar", plot = FALSE) - -PlotMeanRespVar <- plot(x = GermanScape, which = "mean_respVar", plot = FALSE) - -## We display the plots -print(PlotMean, split = c(1, 1, 2, 2), more = TRUE) -print(PlotMeanPredVar, split = c(2, 1, 2, 2), more = TRUE) -print(PlotMeanResidVar, split = c(1, 2, 2, 2), more = TRUE) -print(PlotMeanRespVar, split = c(2, 2, 2, 2), more = FALSE) - -## We build a sphere with our isoscape -plot(x = GermanScape, which = "mean", plot = FALSE, sphere = list(build = TRUE)) - -## We can save a rotating sphere with the isoscape as a .gif-file. -## This file will be located inside your working directory. -## Make sure your current rgl device (from the previous step) is still open -## and that you have both the packages 'rgl' and 'magick' installed. -## The building of the .gif implies to create temporarily many .png -## but those will be removed automatically once the .gif is done. 
-## Uncomment to proceed (after making sure you have rgl, magick & webshot2 installed) -#if(require("rgl") && require("magick") && require("webshot2")) { -# movie3d(spin3d(axis = c(0, 0, 1), rpm = 2), duration = 30, dir = getwd()) -#} - +if (getOption_IsoriX("example_maxtime") > 30) { + ## We prepare the data + GNIPDataDEagg <- prepsources(data = GNIPDataDE) + + ## We fit the models + GermanFit <- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = TRUE, lat_abs = TRUE) + ) + + ## We build the isoscapes + GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) + + GermanScape + plot(GermanScape) + + ## We build more plots + PlotMean <- plot(x = GermanScape, which = "mean", plot = FALSE) + + PlotMeanPredVar <- plot(x = GermanScape, which = "mean_predVar", plot = FALSE) + + PlotMeanResidVar <- plot(x = GermanScape, which = "mean_residVar", plot = FALSE) + + PlotMeanRespVar <- plot(x = GermanScape, which = "mean_respVar", plot = FALSE) + + ## We display the plots + print(PlotMean, split = c(1, 1, 2, 2), more = TRUE) + print(PlotMeanPredVar, split = c(2, 1, 2, 2), more = TRUE) + print(PlotMeanResidVar, split = c(1, 2, 2, 2), more = TRUE) + print(PlotMeanRespVar, split = c(2, 2, 2, 2), more = FALSE) + + ## We build a sphere with our isoscape + plot(x = GermanScape, which = "mean", plot = FALSE, sphere = list(build = TRUE)) + + ## We can save a rotating sphere with the isoscape as a .gif-file. + ## This file will be located inside your working directory. + ## Make sure your current rgl device (from the previous step) is still open + ## and that you have both the packages 'rgl' and 'magick' installed. + ## The building of the .gif implies to create temporarily many .png + ## but those will be removed automatically once the .gif is done. 
+ ## Uncomment to proceed (after making sure you have rgl, magick & webshot2 installed) + # if(require("rgl") && require("magick") && require("webshot2")) { + # movie3d(spin3d(axis = c(0, 0, 1), rpm = 2), duration = 30, dir = getwd()) + # } } } diff --git a/IsoriX/man/options.Rd b/IsoriX/man/options.Rd index 1c3d99c..8ba686a 100644 --- a/IsoriX/man/options.Rd +++ b/IsoriX/man/options.Rd @@ -16,6 +16,7 @@ getOption_IsoriX(x = NULL) \item{example_maxtime}{The number of seconds allowed for a given example to run. It is used to control whether the longer examples should be run or not based on the comparison between this option and the approximate running time of the example on our computers.} \item{Ncpu}{An \emph{integer} corresponding to the number of cores to be used (in functions that can handle parallel processing)} \item{dont_ask}{A \emph{logical} indicating if the user prompt during interactive session during plotting must be inactivated (for development purposes only)} +\item{spaMM_debugmod}{A \emph{logical} indicating if the warnings and errors produced by the spaMM package should stopped being turned into messages (for development purposes only)} }} \item{x}{A character string holding an option name.} diff --git a/IsoriX/man/prepcipitate.Rd b/IsoriX/man/prepcipitate.Rd index e6b9fba..b9e7c3c 100644 --- a/IsoriX/man/prepcipitate.Rd +++ b/IsoriX/man/prepcipitate.Rd @@ -31,25 +31,25 @@ extent as the structural raster. 
This function is designed to be used with ## It will therefore not be run unless you uncomment it ### We fit the models for Germany: -#GNIPDataDEagg <- prepsources(data = GNIPDataDE) +# GNIPDataDEagg <- prepsources(data = GNIPDataDE) # -#GermanFit <- isofit(data = GNIPDataDEagg, +# GermanFit <- isofit(data = GNIPDataDEagg, # mean_model_fix = list(elev = TRUE, lat.abs = TRUE)) # ### We prepare the structural raster: -#StrRaster <- prepraster(raster = ElevRasterDE, -# isofit = GermanFit, -# aggregation_factor = 0) +# StrRaster <- prepraster(raster = ElevRasterDE, +# isofit = GermanFit, +# aggregation_factor = 0) # ### We download the precipitation data: -#getprecip(path = "~/Downloads/") +# getprecip(path = "~/Downloads/") # ### We prepare the raster brick with all the precipitation data: -#PrecipitationBrick <- prepcipitate(path = "~/Downloads/", -# raster = StrRaster) +# PrecipitationBrick <- prepcipitate(path = "~/Downloads/", +# raster = StrRaster) # ### We plot the precipitation data: -#levelplot(PrecipitationBrick) +# levelplot(PrecipitationBrick) } \seealso{ diff --git a/IsoriX/man/prepraster.Rd b/IsoriX/man/prepraster.Rd index 0ce52be..fead2ab 100644 --- a/IsoriX/man/prepraster.Rd +++ b/IsoriX/man/prepraster.Rd @@ -92,52 +92,56 @@ predictions. ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) 
-if(getOption_IsoriX("example_maxtime") > 30) { +if (getOption_IsoriX("example_maxtime") > 30) { + ## We fit the models for Germany + GNIPDataDEagg <- prepsources(data = GNIPDataDE) -## We fit the models for Germany -GNIPDataDEagg <- prepsources(data = GNIPDataDE) + GermanFit <- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = TRUE, lat_abs = TRUE) + ) -GermanFit <- isofit(data = GNIPDataDEagg, - mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) + ### Let's explore the difference between aggregation schemes -### Let's explore the difference between aggregation schemes - -## We aggregate and crop using different settings -ElevationRaster1 <- prepraster( + ## We aggregate and crop using different settings + ElevationRaster1 <- prepraster( raster = ElevRasterDE, isofit = GermanFit, margin_pct = 0, - aggregation_factor = 0) + aggregation_factor = 0 + ) -ElevationRaster2 <- prepraster( + ElevationRaster2 <- prepraster( raster = ElevRasterDE, isofit = GermanFit, margin_pct = 5, - aggregation_factor = 5) + aggregation_factor = 5 + ) -ElevationRaster3 <- prepraster( + ElevationRaster3 <- prepraster( raster = ElevRasterDE, isofit = GermanFit, margin_pct = 10, - aggregation_factor = 5, aggregation_fn = max) + aggregation_factor = 5, aggregation_fn = max + ) -## We plot the outcome of the 3 different aggregation schemes using terra + ## We plot the outcome of the 3 different aggregation schemes using terra -oripar <- par(mfrow = c(1, 3)) ## display 3 plots side-by-side + oripar <- par(mfrow = c(1, 3)) ## display 3 plots side-by-side -plot(ElevationRaster1, main = "Original small raster") -polys(CountryBorders) -polys(OceanMask, col = "blue") + plot(ElevationRaster1, main = "Original small raster") + polys(CountryBorders) + polys(OceanMask, col = "blue") -plot(ElevationRaster2, main = "Small raster aggregated (by mean)") -polys(CountryBorders) -polys(OceanMask, col = "blue") + plot(ElevationRaster2, main = "Small raster aggregated (by mean)") + 
polys(CountryBorders) + polys(OceanMask, col = "blue") -plot(ElevationRaster3, main = "Small raster aggregated (by max)") -polys(CountryBorders) -polys(OceanMask, col = "blue") + plot(ElevationRaster3, main = "Small raster aggregated (by max)") + polys(CountryBorders) + polys(OceanMask, col = "blue") -par(oripar) ## restore graphical settings + par(oripar) ## restore graphical settings } ## The examples below will only be run if sufficient time is allowed @@ -145,24 +149,22 @@ par(oripar) ## restore graphical settings ## if you want to allow for examples taking up to ca. XX seconds to run ## (so don't write XX but put a number instead!) -if(getOption_IsoriX("example_maxtime") > 10) { - -### Let's create a raster centered around the pacific - -## We first create an empty raster -EmptyRaster <- rast(matrix(0, ncol = 360, nrow = 180)) -ext(EmptyRaster) <- c(-180, 180, -90, 90) -crs(EmptyRaster) <- "+proj=longlat +datum=WGS84" +if (getOption_IsoriX("example_maxtime") > 10) { + ### Let's create a raster centered around the pacific -## We crop it around the pacific -PacificA <- prepraster(EmptyRaster, manual_crop = c(110, -70, -90, 90)) -ext(PacificA) # note that the extent has changed! + ## We first create an empty raster + EmptyRaster <- rast(matrix(0, ncol = 360, nrow = 180)) + ext(EmptyRaster) <- c(-180, 180, -90, 90) + crs(EmptyRaster) <- "+proj=longlat +datum=WGS84" -## We plot (note the use of the function shift()!) -plot(PacificA, col = "blue", legend = FALSE) -polys(CountryBorders, col = "black") -polys(shift(CountryBorders, dx = 360), col = "black") + ## We crop it around the pacific + PacificA <- prepraster(EmptyRaster, manual_crop = c(110, -70, -90, 90)) + ext(PacificA) # note that the extent has changed! + ## We plot (note the use of the function shift()!) 
+ plot(PacificA, col = "blue", legend = FALSE) + polys(CountryBorders, col = "black") + polys(shift(CountryBorders, dx = 360), col = "black") } } diff --git a/IsoriX/man/prepsources.Rd b/IsoriX/man/prepsources.Rd index ab0fe18..88b26ba 100644 --- a/IsoriX/man/prepsources.Rd +++ b/IsoriX/man/prepsources.Rd @@ -128,45 +128,57 @@ GNIPDataDEagg <- prepsources(data = GNIPDataDE) head(GNIPDataDEagg) ## Create a processed dataset for Germany per month -GNIPDataDEmonthly <-prepsources(data = GNIPDataDE, - split_by = "month") +GNIPDataDEmonthly <- prepsources( + data = GNIPDataDE, + split_by = "month" +) head(GNIPDataDEmonthly) ## Create a processed dataset for Germany per year -GNIPDataDEyearly <- prepsources(data = GNIPDataDE, - split_by = "year") +GNIPDataDEyearly <- prepsources( + data = GNIPDataDE, + split_by = "year" +) head(GNIPDataDEyearly) ## Create isoscape-dataset for warm months in germany between 1995 and 1996 -GNIPDataDEwarm <- prepsources(data = GNIPDataDE, - month = 5:8, - year = 1995:1996) +GNIPDataDEwarm <- prepsources( + data = GNIPDataDE, + month = 5:8, + year = 1995:1996 +) head(GNIPDataDEwarm) ## Create a dataset with 90\% of obs -GNIPDataDE90pct <- prepsources(data = GNIPDataDE, - prop_random = 0.9, - random_level = "obs") +GNIPDataDE90pct <- prepsources( + data = GNIPDataDE, + prop_random = 0.9, + random_level = "obs" +) lapply(GNIPDataDE90pct, head) # show beginning of both datasets ## Create a dataset with half the weather sources -GNIPDataDE50pctsources <- prepsources(data = GNIPDataDE, - prop_random = 0.5, - random_level = "source") +GNIPDataDE50pctsources <- prepsources( + data = GNIPDataDE, + prop_random = 0.5, + random_level = "source" +) lapply(GNIPDataDE50pctsources, head) ## Create a dataset with half the weather sources split per month -GNIPDataDE50pctsourcesMonthly <- prepsources(data = GNIPDataDE, - split_by = "month", - prop_random = 0.5, - random_level = "source") +GNIPDataDE50pctsourcesMonthly <- prepsources( + data = GNIPDataDE, + 
split_by = "month", + prop_random = 0.5, + random_level = "source" +) lapply(GNIPDataDE50pctsourcesMonthly, head) diff --git a/IsoriX/man/reexports.Rd b/IsoriX/man/reexports.Rd index 30946de..e0735a6 100644 --- a/IsoriX/man/reexports.Rd +++ b/IsoriX/man/reexports.Rd @@ -35,7 +35,7 @@ below to see their documentation. \item{latticeExtra}{\code{\link[latticeExtra]{layer}}} - \item{rasterVis}{\code{\link[rasterVis:levelplot-methods]{levelplot}}, \code{\link[rasterVis:rasterTheme]{RdBuTheme}}} + \item{rasterVis}{\code{\link[rasterVis:rasterTheme]{RdBuTheme}}, \code{\link[rasterVis:levelplot-methods]{levelplot}}} \item{spaMM}{\code{\link[spaMM:get_fittedPars]{get_ranPars}}} diff --git a/IsoriX/man/serialize.Rd b/IsoriX/man/serialize.Rd new file mode 100644 index 0000000..9677bcd --- /dev/null +++ b/IsoriX/man/serialize.Rd @@ -0,0 +1,187 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/serialize.R +\name{serialize} +\alias{serialize} +\alias{serialise} +\alias{readRDS} +\alias{saveRDS} +\alias{saveRDS_IsoriX} +\alias{saveRDS.ISOSCAPE} +\alias{saveRDS.CALIBFIT} +\alias{saveRDS.ISOFIND} +\alias{readRDS.character} +\alias{saveRDS,ISOSCAPE-method} +\alias{saveRDS,CALIBFIT-method} +\alias{saveRDS,ISOFIND-method} +\alias{readRDS,character-method} +\title{Save and read objects produced by IsoriX using RDS files} +\usage{ +saveRDS_IsoriX( + object, + file = "", + ascii = FALSE, + version = NULL, + compress = TRUE, + refhook = NULL +) + +\method{saveRDS}{ISOSCAPE}( + object, + file = "", + ascii = FALSE, + version = NULL, + compress = TRUE, + refhook = NULL +) + +\method{saveRDS}{CALIBFIT}( + object, + file = "", + ascii = FALSE, + version = NULL, + compress = TRUE, + refhook = NULL +) + +\method{saveRDS}{ISOFIND}( + object, + file = "", + ascii = FALSE, + version = NULL, + compress = TRUE, + refhook = NULL +) + +\method{readRDS}{character}(file, refhook = NULL) + +\S4method{saveRDS}{ISOSCAPE}( + object, + file = "", + ascii = FALSE, + version = 
NULL, + compress = TRUE, + refhook = NULL +) + +\S4method{saveRDS}{CALIBFIT}( + object, + file = "", + ascii = FALSE, + version = NULL, + compress = TRUE, + refhook = NULL +) + +\S4method{saveRDS}{ISOFIND}( + object, + file = "", + ascii = FALSE, + version = NULL, + compress = TRUE, + refhook = NULL +) + +\S4method{readRDS}{character}(file, refhook = NULL) +} +\arguments{ +\item{object}{(definition copied from \code{\link[base:readRDS]{base::readRDS}}:) R object to serialize.} + +\item{file}{(definition copied from \code{\link[base:readRDS]{base::readRDS}}:) a connection or the name of the file where the R object is saved to or read from.} + +\item{ascii}{(definition copied from \code{\link[base:readRDS]{base::readRDS}}:) a logical. If \code{TRUE} or \code{NA}, an ASCII representation is written; otherwise (default), a binary one is used. See the comments in the help for \code{\link[base:save]{base::save}}.} + +\item{version}{(definition copied from \code{\link[base:readRDS]{base::readRDS}}:) the workspace format version to use. \code{NULL} specifies the current default version (3). The only other supported value is \code{2}, the default from R 1.4.0 to R 3.5.0.} + +\item{compress}{(definition copied from \code{\link[base:readRDS]{base::readRDS}}:) a logical specifying whether saving to a named file is to use "gzip" compression, or one of "gzip", "bzip2" or "xz" to indicate the type of compression to be used. Ignored if file is a connection.} + +\item{refhook}{(definition copied from \code{\link[base:readRDS]{base::readRDS}}:) a hook function for handling reference objects.} +} +\value{ +For \code{saveRDS}, \code{NULL} invisibly. + +For \code{readRDS}, an R object. 
+} +\description{ +Because files created with IsoriX contain \code{\link[terra:SpatRaster-class]{terra::SpatRaster}} and +\code{\link[terra:SpatVector-class]{terra::SpatVector}} objects, they cannot be saved using \code{\link[base:readRDS]{base::saveRDS}} +or \code{\link[base:save]{base::save}} functions. The reason is that objects created with \link{terra} +point to data stored in memory which are not contained in the R objects +themselves. Adapting the approach implemented in the \link{terra} package, we +provide a wrapper for \code{\link[base:readRDS]{base::saveRDS}} and \code{\link[base:readRDS]{base::readRDS}} functions, +which allows one to save and read objects produced with IsoriX by simply +using \code{saveRDS()} and \code{readRDS()}. +} +\details{ +\code{\link[base:readRDS]{base::saveRDS}} and \code{\link[base:readRDS]{base::readRDS}} are standard S3 functions. So in +order to be able to have a specific behaviour for objects produced with +IsoriX, we imported \code{saveRDS} and \code{readRDS} S4 generics from \link{terra} to +dispatch both S3 and S4 IsoriX-specific methods (see \link{Methods_for_S3}). The +S3 implementation is consistent with the rest of the package and presents all +usual benefits associated with S3 methods (e.g. simple access to the code). +The S4 implementation makes IsoriX methods compatible with the use of +\code{\link[terra:serialize]{terra::saveRDS}} and \code{\link[terra:serialize]{terra::readRDS}}. 
+} +\section{Functions}{ +\itemize{ +\item \code{saveRDS_IsoriX()}: S3 function to save IsoriX objects into a RDS file + +\item \code{saveRDS(ISOSCAPE)}: S3 method to save an \code{ISOSCAPE} object into a RDS file + +\item \code{saveRDS(CALIBFIT)}: S3 method to save a \code{CALIBFIT} object into a RDS file + +\item \code{saveRDS(ISOFIND)}: S3 method to save an \code{ISOFIND} object into a RDS file + +\item \code{readRDS(character)}: S3 method to read an object produced with IsoriX (or other) stored in a RDS file + +\item \code{saveRDS(ISOSCAPE)}: S4 method to save an \code{ISOSCAPE} object into a RDS file + +\item \code{saveRDS(CALIBFIT)}: S4 method to save an \code{CALIBFIT} object into a RDS file + +\item \code{saveRDS(ISOFIND)}: S4 method to save an \code{ISOFIND} object into a RDS file + +\item \code{readRDS(character)}: S4 method to read an object produced with IsoriX (or other) stored in a RDS file + +}} +\examples{ +if (getOption_IsoriX("example_maxtime") > 30) { + ## We prepare the data + GNIPDataDEagg <- prepsources(data = GNIPDataDE) + + ## We fit the models + GermanFit <- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = TRUE, lat_abs = TRUE) + ) + + ## We build the isoscapes + GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) + + ## Saving as RDS + filename <- tempfile(fileext = ".rds") # or whatever names you want + saveRDS(GermanScape, file = filename) + + ## Reading RDS + GermanScape2 <- readRDS(filename) + GermanScape2 + + ## Saving data.frame object as RDS + filename2 <- tempfile(fileext = ".rds") # or whatever names you want + saveRDS(iris, file = filename2) + + ## Reading RDS containing data.frame + iris2 <- readRDS(filename2) + iris2 + + ## Saving terra object as RDS + filename3 <- tempfile(fileext = ".rds") # or whatever names you want + f <- system.file("ex/elev.tif", package = "terra") + r <- rast(f) + saveRDS(r, file = filename3) + + ## Reading RDS containing terra object + r2 <- readRDS(filename3) + r2 +} + +} 
+\keyword{saving}
diff --git a/IsoriX/tests/local_tests.R b/IsoriX/tests/local_tests.R
new file mode 100644
index 0000000..9f24d7b
--- /dev/null
+++ b/IsoriX/tests/local_tests.R
@@ -0,0 +1,34 @@
+if (FALSE) { ## for not running during checks
+  library(IsoriX)
+  options_IsoriX(example_maxtime = Inf)
+
+  example(options_IsoriX)
+  example(getOption_IsoriX)
+
+  example(getelev) # uncomment and run by hand
+  example(getprecip) # uncomment and run by hand
+  example(prepcipitate) # uncomment and run by hand
+
+  example(prepraster)
+  example(prepsources)
+
+  example(isofit)
+  example(isomultifit)
+
+  example(isoscape)
+  example(isomultiscape)
+
+  example(calibfit)
+
+  example(isofind)
+
+  example(create_aliens)
+
+  example(saveRDS_IsoriX)
+
+  library(terra)
+  example(saveRDS_IsoriX)
+}
+
+# devtools::check()
+# devtools::build_manual()
diff --git a/IsoriX/tests/spelling.R b/IsoriX/tests/spelling.R
index c600293..13f77d9 100644
--- a/IsoriX/tests/spelling.R
+++ b/IsoriX/tests/spelling.R
@@ -1,3 +1,6 @@
-if (requireNamespace('spelling', quietly = TRUE)) {
-  spelling::spell_check_test(vignettes = TRUE, error = FALSE, skip_on_cran = TRUE)
+if (requireNamespace("spelling", quietly = TRUE)) {
+  spelling::spell_check_test(
+    vignettes = TRUE, error = FALSE,
+    skip_on_cran = TRUE
+  )
 }
diff --git a/IsoriX/tests/testthat/setup.R b/IsoriX/tests/testthat/setup.R
new file mode 100644
index 0000000..dd89d46
--- /dev/null
+++ b/IsoriX/tests/testthat/setup.R
@@ -0,0 +1,8 @@
+## Delete Rplots.pdf if it exists (if tests fail, it does not otherwise get deleted)
+
+withr::defer(
+  {
+    if (file.exists("Rplots.pdf")) file.remove("Rplots.pdf")
+  },
+  teardown_env()
+)
diff --git a/IsoriX/tests/testthat/test_internals.R b/IsoriX/tests/testthat/test_internals.R
index 372d366..c0d827f 100644
--- a/IsoriX/tests/testthat/test_internals.R
+++ b/IsoriX/tests/testthat/test_internals.R
@@ -4,17 +4,14 @@ test_that("print_nice_and_round", {
   expect_equal(.print_nice_and_round(0.0, digits = 1), "0.0")
expect_equal(.print_nice_and_round(0.0, digits = 2), "0.00") expect_error(.print_nice_and_round(0.0, digits = -2)) - } -) +}) test_that("make rasters and spatial points", { - expect_equal(class(.create_raster(long = seq_len(100)/10, lat = seq_len(100)/10, values = runif(10000)))[[1]], "SpatRaster") - expect_equal(class(.create_spatial_points(long = seq_len(100)/10, lat = seq_len(100)/10, values = runif(100)))[[1]], "SpatVector") - } -) + expect_equal(class(.create_raster(long = seq_len(100) / 10, lat = seq_len(100) / 10, values = runif(10000)))[[1]], "SpatRaster") + expect_equal(class(.create_spatial_points(long = seq_len(100) / 10, lat = seq_len(100) / 10, values = runif(100)))[[1]], "SpatVector") +}) test_that("convert month to numbers", { expect_equivalent(.converts_months_to_numbers("Jan"), 1) expect_warning(.converts_months_to_numbers("Jo")) - } -) +}) diff --git a/IsoriX/tests/testthat/test_na_handling.R b/IsoriX/tests/testthat/test_na_handling.R index 415dff6..3565750 100644 --- a/IsoriX/tests/testthat/test_na_handling.R +++ b/IsoriX/tests/testthat/test_na_handling.R @@ -1,21 +1,27 @@ - ## preparation step: GNIPDataDEagg <- prepsources(data = GNIPDataDE) ## tests: test_that("isofit() can handle NA", { GNIPDataDEagg[1, "n_source_value"] <- NA - - expect_error(GermanFit <<- isofit(data = GNIPDataDEagg, - mean_model_fix = list(elev = FALSE, lat_abs = FALSE), - mean_model_rand = list(uncorr = FALSE, spatial = TRUE), - disp_model_rand = list(uncorr = FALSE, spatial = FALSE)), regexp = NA) + + expect_error(GermanFit <<- isofit( + data = GNIPDataDEagg, + mean_model_fix = list(elev = FALSE, lat_abs = FALSE), + mean_model_rand = list(uncorr = FALSE, spatial = TRUE), + disp_model_rand = list(uncorr = FALSE, spatial = FALSE) + ), regexp = NA) }) test_that("isoscape() can handle NA", { ElevRasterDE[50, 50] <- NA - - expect_error(GermanScape <<- isoscape(raster = ElevRasterDE, isofit = GermanFit), regexp = NA) + GermanFit2 <- isofit( + data = GNIPDataDEagg, + 
mean_model_fix = list(elev = TRUE, lat_abs = FALSE), ## Elev is TRUE for NA to have an effect + mean_model_rand = list(uncorr = FALSE, spatial = TRUE), + disp_model_rand = list(uncorr = FALSE, spatial = FALSE) + ) + expect_error(GermanScape <<- isoscape(raster = ElevRasterDE, isofit = GermanFit2), regexp = NA) }) test_that("plot.ISOSCAPE() can handle NA", { @@ -24,9 +30,11 @@ test_that("plot.ISOSCAPE() can handle NA", { test_that("calibfit() can handle NA", { CalibDataAlien[1, "sample_value"] <- NA - expect_error(CalibAlien <<- calibfit(data = CalibDataAlien, - isofit = GermanFit, - control_optim = list(maxit = 15)), regexp = NA) + expect_error(CalibAlien <<- calibfit( + data = CalibDataAlien, + isofit = GermanFit, + control_optim = list(maxit = 15) + ), regexp = NA) }) test_that("plot.CALIBFIT can handle NA", { @@ -36,16 +44,16 @@ test_that("plot.CALIBFIT can handle NA", { test_that("isofind() can handle NA", { AssignDataAlien[1, "sample_value"] <- NA - expect_error(AssignmentDry <<- isofind(data = AssignDataAlien, - isoscape = GermanScape, - calibfit = CalibAlien), regexp = NA) - + expect_error(AssignmentDry <<- isofind( + data = AssignDataAlien, + isoscape = GermanScape, + calibfit = CalibAlien + ), regexp = NA) + expect_false(all(is.na(terra::values(AssignmentDry$group$pv$lyr.1)))) - }) test_that("plot.ISOFIND() can handle NA", { - expect_warning(plot(AssignmentDry, who = 2), regexp = "The p-values for an assignment samples containing only missing values are considered as 0.") - expect_error(plot(AssignmentDry, who = "group"), regexp = NA) + expect_warning(plot(AssignmentDry, who = 2)) # "The assignment test occurred at location(s) with unknown isoscape value(s); p-values set to 0 for such (a) location(s)." + expect_warning(plot(AssignmentDry, who = "group")) # "The assignment test occurred at location(s) with unknown isoscape value(s); p-values set to 0 for such (a) location(s)." 
}) - diff --git a/IsoriX/tests/testthat/test_options.R b/IsoriX/tests/testthat/test_options.R index c6e3e8e..a59f871 100644 --- a/IsoriX/tests/testthat/test_options.R +++ b/IsoriX/tests/testthat/test_options.R @@ -2,8 +2,8 @@ test_that("test options modification and retrieval", { options_IsoriX(example_maxtime = 30) expect_equal(getOption_IsoriX("example_maxtime"), 30) expect_equal(getOption_IsoriX("Ncpu"), 2) - expect_equal(length(getOption_IsoriX()), 3) + expect_equal(getOption_IsoriX("spaMM_debug"), FALSE) + expect_equal(length(getOption_IsoriX()), 4L) expect_error(getOption_IsoriX("bidon")) expect_error(getOption_IsoriX(c("example_maxtime", "bidon"))) - } -) +}) diff --git a/IsoriX/tests/testthat/test_tibbles.R b/IsoriX/tests/testthat/test_tibbles.R index f5f84a7..82bc019 100644 --- a/IsoriX/tests/testthat/test_tibbles.R +++ b/IsoriX/tests/testthat/test_tibbles.R @@ -1,10 +1,9 @@ - test_that("The workfow works with tibble", { set.seed(123) - index <- sample(1:nrow(GNIPDataDE), 50) + index <- sample(seq_len(nrow(GNIPDataDE)), 50) test_df <- GNIPDataDE[index, ] test_tbl <- test_df - + class(test_tbl) <- c("tbl_df", "tbl", "data.frame") test_tbl$source_ID <- as.character(test_tbl$source_ID) ref <- prepsources(test_df) @@ -12,35 +11,33 @@ test_that("The workfow works with tibble", { expect_equal(class(job), "data.frame") expect_equal(ref, job) - - ## Test that spaMM works correctly with tibbles + + ## Test that spaMM works correctly with tibbles ## Isofit - + ref_isofit <- isofit(ref) ref_2 <- ref_isofit$mean_fit$fixef - - class(job) <- c("tbl_df", "tbl", "data.frame") + + class(job) <- c("tbl_df", "tbl", "data.frame") job_isofit <- isofit(job) job_2 <- job_isofit$mean_fit$fixef - + expect_equal(ref_2, job_2) ## calibfit test_df2 <- CalibDataBat[1:10, ] test_tbl2 <- test_df2 - class(test_tbl2) <- c("tbl_df", "tbl", "data.frame") + class(test_tbl2) <- c("tbl_df", "tbl", "data.frame") test_tbl2$site_ID <- as.character(test_tbl2$site_ID) - - ref_calib <- 
calibfit(test_df2, ref_isofit) ## df and df
+
+  ref_calib <- calibfit(test_df2, ref_isofit) ## df and df
   ref_3 <- ref_calib$fixefCov
-
+
   job_calib <- calibfit(test_tbl2, ref_isofit) ## tbl and df
   job_3 <- job_calib$fixefCov
-  job_calib2 <- calibfit(test_tbl2, job_isofit) ## tbl and tbl
-  job_33 <- job_calib2$fixefCov
+  job_calib2 <- calibfit(test_tbl2, job_isofit) ## tbl and tbl
+  job_33 <- job_calib2$fixefCov
   expect_equal(job_3, ref_3)
   expect_equal(job_33, ref_3)
-
-}
-)
+})
diff --git a/README.md b/README.md
index b4024cf..ea036e2 100644
--- a/README.md
+++ b/README.md
@@ -32,13 +32,13 @@ You can download and install the stable version of IsoriX directly from within R
 install.packages("IsoriX", dependencies = TRUE)
 ```
 
-Note: if you get into troubles due to `elevatr`, `gmp`, `magick`, `rgl`, `testthat`, or `webshot2` retry using simply:
+Note: if you get into troubles due to suggested package(s) (`colorspace`, `elevatr`, `gmp`, `magick`, `rgl`, `spelling`, `testthat`, `webshot2` or `withr`) retry using simply:
 
 ```R
 install.packages("IsoriX")
 ```
 
-These packages offer additional functionalities but some of them are particularly difficult to install on some systems.
+These packages offer additional functionalities but some of them can be difficult to install on some systems.
 
 If you want the development version of IsoriX, you can download and install it by typing:
 
@@ -57,7 +57,7 @@ library(httr)
 with_config(use_proxy("192.123.4.5:6789"), remotes::install_github("courtiol/IsoriX/IsoriX"))
 ```
 
-Off course, unless you are in the same institute than some of us, replace the numbers (`"192.123.4.5:6789"`) with those corresponding to your proxy settings!
+Of course, replace the numbers (`"192.123.4.5:6789"`) with those corresponding to your proxy settings!
 
 ## How can you contribute?
diff --git a/bookdown/Advanced.Rmd b/bookdown/Advanced.Rmd index 0550941..1b7d2b3 100644 --- a/bookdown/Advanced.Rmd +++ b/bookdown/Advanced.Rmd @@ -73,8 +73,8 @@ For example, consider you want to add the point that is the most compatible with The first step is to recover the coordinates for such a point: ```{r maximum, message = FALSE} -library(raster) -coord <- coordinates(AssignedBats2$group$pv) +library(terra) +coord <- crds(AssignedBats2$group$pv) MaxLocation <- coord[which.max(values(AssignedBats2$group$pv)), ] Maximum <- data.frame(long = MaxLocation[1], lat = MaxLocation[2]) Maximum @@ -132,7 +132,7 @@ As you can see, you first initialize the creation of the plot with the function As arguments, you probably want to specify the dimensions and the resolution of the file. The height and width are here considered to be in pixels (default setting, you can choose other units if you want using the argument `units`). Here we chose the so-called Full-HD or 1080p resolution (1080x1920) because we wanted to observe the isoscape carefully on a monitor of that resolution. If your screen is Full-HD, try it and display the plot in full screen to get better results (if the plot does not match the definition of your screen, things can get ugly), if your screen is not, try another resolution. -If you don't know what resolution your screen has, you can visit http://www.whatismyscreenresolution.com/. +If you don't know what resolution your screen has, you can visit https://screenresolutiontest.com. The parameter `res` is very useful as it rescales the line and fonts in the plot. So if everything is too small just increase the value and if everything looks too bold and ugly, decreases it. @@ -211,24 +211,22 @@ The trick is to simply add `dev='CairoPNG'` in the option of the chunks producin It is straightforward to export all spatial objects created by IsoriX into formats compatible with main software for Geographic Information System (GIS). 
This can be done using multiple R packages. -Here is an example of how to export a GTiff raster using the package [**raster**](https://rspatial.org/raster/pkg/7-writing.html): +Here is an example of how to export a GTiff raster using the package [**terra**](https://rspatial.org/spatial/5-files.html#writing-raster-data): ```{r save_raster_GIS_fake, eval=FALSE} -library(raster) +library(terra) writeRaster(EuropeIsoscape$isoscapes$mean, filename = "EuropeIsoscape.tif", - format = "GTiff", overwrite = TRUE, NAflag = -9999) ``` ```{r save_raster_GIS, echo=FALSE} -raster::writeRaster(EuropeIsoscape$isoscapes$mean, - filename = "output/EuropeIsoscape.tif", - format = "GTiff", - overwrite = TRUE, - NAflag = -9999) +terra::writeRaster(EuropeIsoscape$isoscapes$mean, + filename = "output/EuropeIsoscape.tif", + overwrite = TRUE, + NAflag = -9999) ``` @@ -356,20 +354,18 @@ EuropeIsoscape_weighted <- isoscape(raster = ElevEurope, isofit = EuropeFit_weig ``` ```{r rest_workflow_weights1_fake_real, echo = FALSE} -if (file.exists("output/EuropeFit_weighted.rda")) { - load("output/EuropeFit_weighted.rda") +if (file.exists("output/EuropeFit_weighted.rds")) { + EuropeFit_weighted <- readRDS("output/EuropeFit_weighted.rds") } else { EuropeFit_weighted <- isofit(data = GNIPData_with_precipEUagg, mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) - save(EuropeFit_weighted, file = "output/EuropeFit_weighted.rda", compress = "xz") + saveRDS(EuropeFit_weighted, file = "output/EuropeFit_weighted.rds", compress = "xz") } -if (file.exists("output/EuropeIsoscape_weighted.rda")) { - load("output/EuropeIsoscape_weighted.rda") +if (file.exists("output/EuropeIsoscape_weighted.rds")) { + EuropeIsoscape_weighted <- readRDS("output/EuropeIsoscape_weighted.rds") } else { EuropeIsoscape_weighted <- isoscape(raster = ElevEurope, isofit = EuropeFit_weighted) - if (raster::inMemory(EuropeIsoscape_weighted$isoscapes)) { - save(EuropeIsoscape_weighted, file = "output/EuropeIsoscape_weighted.rda", compress = 
"xz") - } + saveRDS(EuropeIsoscape_weighted, file = "output/EuropeIsoscape_weighted.rds", compress = "xz") } ``` @@ -386,8 +382,8 @@ The two isoscapes look very similar, but we can clarify what the differences are levelplot(EuropeIsoscape_weighted$isoscapes$mean - EuropeIsoscape$isoscapes$mean, margin = FALSE, main = "mean EuropeIsoscape_weighted - mean EuropeIsoscape") + - layer(sp.polygons(CountryBorders, col = "white")) + - layer(sp.polygons(OceanMask, fill = "black")) + layer(lpolygon(CountryBorders, border = "white")) + + layer(lpolygon(OceanMask, col = "black")) ``` Let us similarly compare the isoscapes showing the prediction variance and the residual variance around the mean isoscape values: @@ -396,13 +392,13 @@ Let us similarly compare the isoscapes showing the prediction variance and the r levelplot(EuropeIsoscape_weighted$isoscapes$mean_predVar - EuropeIsoscape$isoscapes$mean_predVar, margin = FALSE, main = "predVar EuropeIsoscape_weighted - predVar EuropeIsoscape") + - layer(sp.polygons(CountryBorders, col = "white")) + - layer(sp.polygons(OceanMask, fill = "black")) + layer(lpolygon(CountryBorders, border = "white")) + + layer(lpolygon(OceanMask, col = "black")) levelplot(EuropeIsoscape_weighted$isoscapes$mean_residVar - EuropeIsoscape$isoscapes$mean_residVar, margin = FALSE, main = "residVar EuropeIsoscape_weighted - residVar EuropeIsoscape") + - layer(sp.polygons(CountryBorders, col = "white")) + - layer(sp.polygons(OceanMask, fill = "black")) + layer(lpolygon(CountryBorders, border = "white")) + + layer(lpolygon(OceanMask, col = "black")) ``` As you can see the residual variance is quite different because it now represents the (weighted) variation in isotopic values _between months_ for an average year and no longer the total variation combining both the variation across months and years. 
@@ -481,10 +477,10 @@ PrecipitationBrickEU <- prepcipitate(path = "input", raster = ElevEurope) ## no
 ```
 
 ```{r prepare_precip_real, echo=FALSE}
-if (file.exists("output/PrecipitationBrickEU.rda")) {
+if (file.exists("output/PrecipitationBrickEU.rds")) {
 } else {
   PrecipitationBrickEU <- prepcipitate(path = "input", raster = ElevEurope) ## no '/' here!
-  save(PrecipitationBrickEU, file = "output/PrecipitationBrickEU.rda", compress = "xz")
+  saveRDS(PrecipitationBrickEU, file = "output/PrecipitationBrickEU.rds", compress = "xz")
 }
 ```
 
@@ -523,12 +519,12 @@ EuropeFit12 <- isomultifit(data = GNIPDataEU12agg,
 ```
 
 ```{r isomultifit real, echo = FALSE, results = FALSE}
-if (file.exists("output/EuropeFit12.rda")) {
-  load("output/EuropeFit12.rda")
+if (file.exists("output/EuropeFit12.rds")) {
+  EuropeFit12 <- readRDS("output/EuropeFit12.rds")
 } else {
   EuropeFit12 <- isomultifit(data = GNIPDataEU12agg,
                              mean_model_fix = list(elev = TRUE, lat_abs = TRUE))
-  save(EuropeFit12, file = "output/EuropeFit12.rda", compress = "xz")
+  saveRDS(EuropeFit12, file = "output/EuropeFit12.rds", compress = "xz")
 }
 ```
 
@@ -550,13 +546,13 @@ EuropeIsoscape_weighted2 <- isomultiscape(raster = ElevEurope,
 ```
 
 ```{r EuropeIsoscape_weighted_real, echo=FALSE}
-if (file.exists("output/EuropeIsoscape_weighted2.rda")) {
-  load("output/EuropeIsoscape_weighted2.rda")
+if (file.exists("output/EuropeIsoscape_weighted2.rds")) {
+  EuropeIsoscape_weighted2 <- readRDS("output/EuropeIsoscape_weighted2.rds")
 } else {
   EuropeIsoscape_weighted2 <- isomultiscape(raster = ElevEurope,
                                             isofit = EuropeFit12,
                                             weighting = PrecipitationBrickEU)
-  save(EuropeIsoscape_weighted2, file = "output/EuropeIsoscape_weighted2.rda")
+  saveRDS(EuropeIsoscape_weighted2, file = "output/EuropeIsoscape_weighted2.rds")
 }
 ```
diff --git a/bookdown/Assignment.Rmd b/bookdown/Assignment.Rmd
index 582324e..04437d0 100644
--- a/bookdown/Assignment.Rmd
+++ b/bookdown/Assignment.Rmd
@@ -49,13 +49,13 @@ AssignedBats <- isofind(data = AssignDataBat,
 ```
 
 ```{r assignment real,
echo = FALSE, message = FALSE}
-if (!file.exists("output/AssignedBats.rda")) {
+if (!file.exists("output/AssignedBats.rds")) {
   AssignedBats <- isofind(data = AssignDataBat,
                           isoscape = EuropeIsoscape,
                           calibfit = CalibBats)
-  save(AssignedBats, file = "output/AssignedBats.rda", compress = "xz")
+  saveRDS(AssignedBats, file = "output/AssignedBats.rds", compress = "xz")
 } else {
-  load("output/AssignedBats.rda")
+  AssignedBats <- readRDS("output/AssignedBats.rds")
 }
 ```
diff --git a/bookdown/Calibration.Rmd b/bookdown/Calibration.Rmd
index 7888f97..e7a6c48 100644
--- a/bookdown/Calibration.Rmd
+++ b/bookdown/Calibration.Rmd
@@ -56,7 +56,7 @@ calib_table <- tibble::tibble(Method = c("wild", "lab", "desk", "desk_inverse"),
                               c("known", "known", "unknown, estimated from fit `lm(sample_value ~ source_value)`", "unknown, estimated from fit `lm(source_value ~ sample_value)`"),
                               "Isotopic composition of the environment associated with the calibration samples" = c("unknown, estimated from the isoscapes", "known, or assumed to be known", "unknown, estimated from fit `lm(sample_value ~ source_value)`", "unknown, estimated from fit `lm(source_value ~ sample_value)`"))
-kable(calib_table) %>% kable_styling(bootstrap_options = "striped", font_size = 15)
+kable(calib_table) |> kable_styling(bootstrap_options = "striped", font_size = 15)
 ```
 
 When using a method other than the default method, you must set the method to be used by `calibfit()` using a specific argument called `method`, e.g. `calibfit(..., method = 'lab')`.
@@ -82,7 +82,7 @@ head(CalibDataBat)
 ```
 
 ```{r CalibDataBat real, echo = FALSE}
-kable(head(CalibDataBat)) %>% kable_styling(bootstrap_options = "striped", font_size = 15)
+kable(head(CalibDataBat)) |> kable_styling(bootstrap_options = "striped", font_size = 15)
 ```
 
 For your own applications, make sure that the dataset you use has the same structure.
That is, the calibration dataset must contain (as long as you use the calibration method 'wild') a column `site_ID` of class factor describing the sampling site, the columns containing all covariates required for the geostatistical models to predict the isoscape values at these locations (so here `long`, `lat`, and `elev`), and a column `sample_value` containing the measured isotope values of the calibration samples. An important constraint to keep in mind is that only measurements on multiple calibration samples per location can capture variation between different samples exposed to the same isotope environment. Therefore, IsoriX needs multiple calibration samples for several (not necessarily all) sampling locations. That means that some levels of `site_ID` must be repeated on several rows. If laboratory measurements are replicated, we recommend to consider the mean measurement value for each sampling unit (here for each bat) instead of considering multiple rows for a given calibration sample in order to avoid confounding biological replicates from technical ones. If you do so, note however that this implies the technical variance to be neglected (which should be fine because it is usually very low unless something went wrong in the lab). 
@@ -96,11 +96,11 @@ CalibBats <- calibfit(data = CalibDataBat, isofit = EuropeFit) ``` ```{r calib real, echo = FALSE, message = FALSE} -if (!file.exists("output/CalibBats.rda")) { +if (!file.exists("output/CalibBats.rds")) { CalibBats <- calibfit(data = CalibDataBat, isofit = EuropeFit) - save(CalibBats, file = "output/CalibBats.rda", compress = "xz") + saveRDS(CalibBats, file = "output/CalibBats.rds", compress = "xz") } else { - load("output/CalibBats.rda") + CalibBats <- readRDS("output/CalibBats.rds") } ``` diff --git a/bookdown/Installation.Rmd b/bookdown/Installation.Rmd index 43ab72e..81b10fc 100644 --- a/bookdown/Installation.Rmd +++ b/bookdown/Installation.Rmd @@ -48,7 +48,7 @@ remotes::install_github("courtiol/IsoriX/IsoriX", dependencies = TRUE) ``` ::: {.rmdnote} -**Geeky note**: Note that the double mention of IsoriX in `"courtiol/IsoriX/IsoriX"` is not a typo: the name is present twice because on our IsoriX repository in GitHub the content of the R package is located in a sub-folder also called IsoriX. +**Geeky note**: Note that the double mention of IsoriX in `"courtiol/IsoriX/IsoriX"` is not a typo: the name is present twice because on our IsoriX repository in GitHub the content of the R package is located inside a sub-folder also called IsoriX. ::: ## Dependencies to other R packages @@ -56,7 +56,7 @@ remotes::install_github("courtiol/IsoriX/IsoriX", dependencies = TRUE) In the unlikely event you encounter a difficulty installing IsoriX, that may come from another package on which IsoriX depends, or from another package on which these other packages depends on, and so on. Indeed, much of the work performed by a given package in R is actually outsourced to other packages. As mentioned in chapter \@ref(introduction) IsoriX uses, for example, spaMM to fit the models and rasterVis to make the plots. 
-Thus, if a single package that is needed by IsoriX or by the packages used by IsoriX cannot install, IsoriX will not install (or worse, it may install but not work). +Thus, if a single package that is needed by IsoriX (or by the packages used by IsoriX) cannot install, IsoriX will not install (or worse, it may install but not work). To avoid problems caused by dependencies, the only thing we can do on our side is to minimise dependencies and try not to use packages people have often problems with. That is why we have already stopped relying on packages (e.g. Cairo) which are not so easy to install on some systems. @@ -99,17 +99,16 @@ plot(miniCRAN::makeDepGraph("IsoriX", recursive = FALSE, suggests = FALSE)) ``` ::: {.rmdnote} -**Geeky note**: We are not using the original version of the package miniCRAN because it would display for each dependency their own dependencies, and for each of those dependencies, it would display in turn all their own dependencies, and so on. +**Geeky note**: We are not using the original version of the package miniCRAN because it would display the own dependencies of each dependency of IsoriX. ::: While the number of dependencies is reasonable, since each package depends on other packages, the current stable release of IsoriX indirectly uses `r length(miniCRAN::pkgDep("IsoriX", suggests = FALSE)) - 1` packages (on top of some R base packages) and `r length(miniCRAN::pkgDep("IsoriX", suggests = TRUE)) - length(miniCRAN::pkgDep("IsoriX", suggests = FALSE))` more packages can optionally be used to increase functionalities. All in all, this represents quite a lot of dependencies, and problems can thus happen. -In case of problem(s), you should thus read carefully the error message and make sure that the problem is not caused by some other package. If that is the case, try to install the problematic package on its own (check the documentation of that other package, it may help). 
-One package that often causes problem is [**rgdal**](http://rgdal.r-forge.r-project.org/index.html), due to its dependencies on many system libraries. -A work around is to simply not rely on the problematic package (rgdal or other). -That is possible as long as the problematic package is one that IsoriX "suggests" (or one that is required by such suggested packages) and not one that IsoriX depends on. +In case of problem(s), you should thus read carefully the error message and make sure that the problem is not caused by some other packages. If that is the case, try to install the problematic packages on their own (check the documentation of that other packages, it may help). +A work around is to simply not rely on the problematic packages. +That is possible as long as the problematic packages are among those that IsoriX "suggests" (or among those that are required by such suggested packages) and not packages that IsoriX depends on. The list of such packages that are not crucial for IsoriX (but that offer additional functionalities) is this: *`r sort(setdiff(miniCRAN::pkgDep("IsoriX", suggests = TRUE), miniCRAN::pkgDep("IsoriX", suggests = FALSE)))`*. @@ -119,6 +118,8 @@ Since IsoriX could live without them and still perform all essential tasks, to c install.packages("IsoriX") ``` +That way the argument `dependencies` of the function `install.packages()` will be set to its default setting `NA`, which does not lead to install "suggested" packages. + Note: if you are used to install the packages using the RStudio menu, there is a small box that you can tick or untick to do the same thing. @@ -157,14 +158,14 @@ None of our examples takes a terribly long time to run so we recommend you to ac options_IsoriX(example_maxtime = Inf) ``` -The option `Ncpu` allows you to set up how many CPU you allow IsoriX to use. -Most computer nowadays have 2, 4 or 8. 
If a function can use several CPU and performed -so-called parallel computing, the computation will be much faster if you allows it to use more CPU. +The option `Ncpu` allows you to set up how many Central Processing Units (CPUs) you allow IsoriX to use. +Most computers nowadays have 2, 4 or 8. If a function can use several CPUs and perform +so-called parallel computing, the computation will be much faster if you allow it to use more CPUs. For now, only a few functions in IsoriX can make good use of that. We will try to make more functions able to perform such parallel computing in the future. -We considered 2 as the default number of CPU, but you may want to increase this. -For example if you have 4 CPU you can do: +We considered 2 as the default number of CPUs, but you may want to increase this. +For example if you have 4 CPUs you can do: ```{r Ncpu, eval = FALSE} options_IsoriX(Ncpu = 4) diff --git a/bookdown/Isoscape.Rmd b/bookdown/Isoscape.Rmd index 5c571f3..76ff415 100644 --- a/bookdown/Isoscape.Rmd +++ b/bookdown/Isoscape.Rmd @@ -5,24 +5,25 @@ To get an isoscape, you need data capturing how the isotope composition varies in space. In this documentation, we will use data provided by the Global Networks of Isotopes in Precipitation (GNIP), but you can use whatever source of data you prefer, or have access to. In any case, do make sure to format your data the same way we do it. -Precisely, your dataset should be a `data.frame` (or `tibble`) with the same columns as the ones shown below in section \@ref(GNIPDataDE). +Precisely, your dataset should be a `data.frame` (or a `tibble`) with the same columns as the ones shown below in section \@ref(GNIPDataDE). ### The GNIP database {#GNIP} You can download precipitation isotope data for hydrogen and oxygen from the Global Networks of Isotopes in Precipitation (GNIP). -This step must be done outside IsoriX since there is no API for GNIP. 
+This step must be done outside IsoriX since there is no application programming interface for GNIP. -To get to know what the GNIP is, its history, and more importantly its terms of use and information about the data, go there: http://www-naweb.iaea.org/napc/ih/IHS_resources_gnip.html +To get to know what the GNIP is, go there and explore related resources: +https://www.iaea.org/services/networks/gnip The GNIP data are free to download after the registration process is completed. The following link will bring you to the right place to create an account: https://websso.iaea.org/IM/UserRegistrationPage.aspx?returnpage=http://nucleus.iaea.org/wiser -Once your account has been activated, download the data you need from here: +Once your account has been activated, download the data you need from the WISER interface: https://nucleus.iaea.org/wiser/index.aspx (Tab Datasets) -For the time being, downloads are limited to 5,000 records per batch, which makes the compilation of huge databases fastidious. +Last time we checked, downloads were limited to 5,000 records per batch, which makes the compilation of huge databases fastidious. GNIP promised to come up, in the future, with datasets directly prepared for IsoriX to save you the trouble. We are eagerly waiting for this to happen! @@ -55,7 +56,7 @@ For example, the first row contains the following information: ```{r GNIP_raw_1st_row, echo = FALSE} ## we display the first row for this dataset as a column for visualization -kable(t(rawGNIP[1, ])) %>% kable_styling(bootstrap_options = "striped", font_size = 15) +kable(t(rawGNIP[1, ])) |> kable_styling(bootstrap_options = "striped", font_size = 15) ``` We are now going to reshape these data step-by-step to make them ready for IsoriX. 
@@ -120,7 +121,7 @@ As you can see, the format is the same as the one for `GNIPDataDE`, which is pre In order to build your isoscape with IsoriX, your dataset must be aggregated in such a way that each observation corresponds to the mean and variance of isotope values collected in one location, over a time period. -To aggregate the raw data, you can choose to aggregate your dataset on your own, or to use our function `prepsources()`. +To aggregate the raw data, you can choose to aggregate your dataset on your own (e.g. using the package [**dplyr**](https://dplyr.tidyverse.org/index.html)), or to use our function `prepsources()`. This function allows you to apply some restriction on the data, such as selecting only particular months, years, or locations. The function `prepsources()` also allows you to aggregate the data separately for different time periods (see section \@ref(weighted)). @@ -187,31 +188,33 @@ Therefore, you may want to save the fitted models in order to be allowed to reus This can be done as follow: ```{r saving models fake, eval = FALSE} -save(EuropeFit, file = "EuropeFit.rda", compress = "xz") +saveRDS(EuropeFit, file = "EuropeFit.rds", compress = "xz") ``` ```{r saving models real, echo = FALSE, warning=FALSE} if (!file.exists("output/EuropeFit.rda")) - save(EuropeFit, file = "output/EuropeFit.rda", compress = "xz") + saveRDS(EuropeFit, file = "output/EuropeFit.rds", compress = "xz") ``` -The function `save()` will (by default) store your R object in a file that can be found in your working directory. -To use `save()`, we must provide the object you want as a first argument of the function. +The function `saveRDS()` will store your R object in a file. +To use `saveRDS()`, you must provide the object you want as a first argument of the function. Then, `file =` defines the name of the file that will store the R object in your hard drive. You can also include a path to this name so to store the file wherever you want. 
-This name can be different from the name object but naming the file as the object allows you to remember what the name of the stored object is (check `?saveRDS` for an alternative way of saving R objects which allows for you to name objects when loading them). +This name can be different from the name of the object but here we stick to the same name for clarity. The last argument `compress =` is optional; it allows for the creation of smaller files without loosing any content, so we always use it. -For loading a saved object (in a new session of R for example), just use the function `load()` as follows (but make sure the saved object is in your working directory or include the path to the file names): +For loading a saved object (in a new session of R for example), just use the function `readRDS()` as follows: ```{r loading models, eval = FALSE} -load(file = "EuropeFit.rda") +EuropeFit <- readRDS(file = "EuropeFit.rds") ``` _Be careful_, we do not recommend you to reuse saved object after updating either IsoriX, or spaMM, or both. After one of such update, the best practice to make sure that every thing is working properly is to refit all your models. By doing this you may also benefit from potentially new improvements we would have implemented. +There is another way to save and load objects in R, which is to use the functions `save()` and `load()`. +However, it is better to get used to `saveRDS()` and `readRDS()` because only those can be used to store objects that contain spatial information (which we will do below). ## Examining the fitted models @@ -229,20 +232,22 @@ plot(EuropeFit) options_IsoriX(old_opt) ``` -In the panel produced, the left column shows the relationship between the observed and predicted response (top) and the variation in spatial autocorrelation with the distance between location (bottom) captured by the model for the fit called `mean_fit`, which corresponds to the fit of the mean isotopic values. 
-The right column shows the same information for the fit called `disp_fit`, which corresponds to the fit of the residual dispersion variance in the isotope values. -On the first row you can see points distributed practically along the 1:1 diagonal. +The first plot shows the relationship between the observed and predicted response for the fitted model called `mean_fit`, which corresponds to the fit of the mean isotopic values. +The second plot shows the same thing for the model called `disp_fit`, which corresponds to the fit of the residual dispersion variance in the isotope values. +Here you can see, in these first two plots, points distributed practically along the 1:1 diagonal. A different slope would suggest a high residual variance of the data. -We do not expect the points to fall exactly on the line because the model fit does not attempt to predict perfectly the observations used during the fit. +We do not expect the points to fall exactly on the 1:1 lines because the model fit does not attempt to predict perfectly the observations used during the fit. Instead, the model fit produces a smooth surface that aims at reaching a maximum predictive power for locations not considered during the fit. -The second row gives you an idea of the strength of the spatial autocorrelation captured by the models. + +The third and fourth plots show the variation in spatial autocorrelation with the distance between locations captured by `mean_fit` and `disp_fit`, respectively. +These plots thus give you an idea of the strength of the spatial autocorrelation captured by the models. Here the results suggest that the autocorrelation is very strong. Not considering this autocorrelation would thus lead here to a poor approximation of the isoscape and resulting assignments. 
### Examining the summary tables -To simply explore the fitted models you can simply type their name and the information of each fit will be displayed: +To explore the fitted models you can start by simply typing their name and the information of each fit will be displayed: ```{r summary isofit} EuropeFit @@ -265,7 +270,7 @@ AIC(EuropeFit$mean_fit) ``` ```{r print AIC EuropeFit, echo = FALSE} -print(AIC(EuropeFit$mean_fit)) +print(AIC(EuropeFit$mean_fit, verbose = FALSE)) ``` Note that we re-exported some spaMM functions for you to use without the need to load the package spaMM (e.g. `AIC.HLfit()` which is implicitly here), but you will have to call `library(spaMM)` to access all spaMM functions. @@ -292,14 +297,14 @@ if (!file.exists("input/elevation_world_z5.tif")) { } ``` -You may not need to pass any argument to `getelev()`, but here we opted to choose to define ourselves where to store the elevation raster and how to call such file using `file = "input/elevation_EU_z5.tif"`. -We did not alter the resolution which is high enough under the default setting for our example, but doing so would be easy: one simply needs to define a value lower or larger than 5 for an argument called `z` (see `?getelev()` for details and [here](https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution) for information on what value for the zoom parameter `z` really implies). +You may not need to pass any argument to `getelev()`, but here we chose to define ourselves where to store the elevation raster and how to call such file using `file = "input/elevation_EU_z5.tif"`. 
+We did not alter the default resolution, which is high enough for our example, but doing so would be easy: one simply needs to define a value lower or larger than 5 for an argument called `z` (see `?getelev()` for details and [here](https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution) for information on what value for the zoom parameter `z` really implies). -We then import the high resolution elevation raster and transform it into an R object of class `RasterLayer` object using the package raster: +We then import the high resolution elevation raster and transform it into an R object of class `SpatRaster` using the package [**terra**](https://rspatial.github.io/terra/index.html): ```{r elevationraster, dev='CairoPNG'} ## we import the high resolution raster -ElevWorld <- raster("input/elevation_world_z5.tif") +ElevWorld <- rast("input/elevation_world_z5.tif") ## we check that the import worked ElevWorld @@ -323,15 +328,14 @@ To crop and resize the structural raster, you can use our function `prepraster() ::: {.rmdnote} **Geeky note**: In principle you could directly crop and resize the raster to the dimensions you want before downloading it (`getelev()` does allows for you to do that). However, in practice, we recommend you to download a raster that is a larger than what you need (`getelev()` downloads the full world by default, which should always be sufficient) and with a resolution overshooting a little what you may need. -The benefit of relying on `prepraster()` rather than `getelev()` to crop and downsize the structural raster is that if you don't like the result, you can simply rerun `prepraster()` again -- with different settings -- without having to download again a large amount of data from the net. -The only reason to rerun `getelev()` would be if you think the resolution of your initial raster is too low. 
+The benefit of relying on `prepraster()` rather than `getelev()` to crop and downsize the structural raster is that you can study the impact of your decisions by simply rerunning `prepraster()` again -- with different settings -- without having to download again a large amount of data from the net. ::: -Here, the structural raster downloaded with `getelev()` covers the entire planet, so we will crop it to the limit of Europe which we also used to select our source data (see section \@ref(processing)). +Here, the structural raster downloaded with `getelev()` covers the entire planet, so we will crop it to the limits of Europe, which we also used to select our source data (see section \@ref(processing)). To crop the elevation raster to a particular size, you can either provide a set of coordinates to `prepraster()`by means of the argument `manual_crop`. Alternatively, you can simply provide a fitted model to the function `prepraster()` by means of the argument `isofit`. -In this latter case, the function will determine the coordinates required for the cropping automatically, from the coordinates of the weather stations used during the fitting procedure. +In this latter case, the function will automatically determine the coordinates required for the cropping, from the coordinates of the weather stations used during the fitting procedure. 
We choose here to use this simpler way of defining the cropping area: ```{r build ElevEurope fake, eval = FALSE} @@ -341,22 +345,21 @@ ElevEurope <- prepraster(raster = ElevWorld, ``` ```{r build ElevEurope real, echo = FALSE, results = FALSE} -if (file.exists("output/ElevEurope.rda")) { - load("output/ElevEurope.rda") } else { - ElevEurope <- prepraster(raster = ElevWorld, - isofit = EuropeFit, - aggregation_factor = 4) - if (raster::inMemory(ElevEurope)) { - save(ElevEurope, file = "output/ElevEurope.rda", compress = "xz") - } +if (file.exists("output/ElevEurope.rds")) { + ElevEurope <- terra::readRDS("output/ElevEurope.rds") + } else { + ElevEurope <- prepraster(raster = ElevWorld, + isofit = EuropeFit, + aggregation_factor = 4) + terra::saveRDS(ElevEurope, file = "output/ElevEurope.rds", compress = "xz") } ``` You may see that we also chose here to reduce the resolution of the elevation raster by choosing an aggregation factor of 4. We recommend to first use a large aggregation factor (e.g. 10) to draft your workflow and quickly notice errors in your code. -Then, once things are working as they should, we recommend you to use the lowest aggregation factor that you hardware and patience can handle (i.e. 1 if possible). +Once things are working as they should, we recommend you then use the lowest aggregation factor that your hardware and patience can handle (i.e. 1 if possible). As mentioned above, if you need isoscapes with a even greater resolution, use `getelev()` with a value of the parameter `z` that is higher than 5 (an increment of 1 -- i.e. `z = 6` already makes a big difference as shown [here](https://github.com/tilezen/joerd/blob/master/docs/data-sources.md#what-is-the-ground-resolution)). -Here we set the `aggregation_factor` to 4 as something in between which produces plots nice enough for this tutorial, without being too heavy for us to recompile this bookdown quickly; but you should use a higher resolution than used here for real applications. 
+Here we set the `aggregation_factor` to 4 as something in between which produces plots nice enough for this tutorial, without being too heavy for us to recompile this bookdown quickly; but, for real applications, you should use a higher resolution than used here. You can easily check what your structural raster looks like by plotting it. Compared to the map plotting above for `ElevWorld`, we will now draw a slightly more advanced map using the functions of the packages [**lattice**](http://lattice.r-forge.r-project.org) and [**rasterVis**](https://oscarperpinan.github.io/rastervis/) which we made available in IsoriX: @@ -366,8 +369,8 @@ levelplot(ElevEurope, margin = FALSE, col.regions = viridisLite::turbo, ## the colour palette main = "Structural raster") + -layer(sp.polygons(CountryBorders, col = "white")) + -layer(sp.polygons(OceanMask, col = "white", fill = "lightgreen")) +layer(lpolygon(CountryBorders, border = "white")) + +layer(lpolygon(OceanMask, border = "white", col = "lightgrey")) ``` Note that, in this raster, the highest elevation does not correspond to the highest elevations that are known in the depicted area (the maximal value in the raster is `r round(max(values(ElevEurope)))`m, but Mount Elbrus is 5642m tall). 
@@ -391,14 +394,12 @@ EuropeIsoscape <- isoscape(raster = ElevEurope, ``` ```{r build EuropeIsoscape real, warning = FALSE, echo = FALSE, results = FALSE} -if (file.exists("output/EuropeIsoscape.rda")) { - load("output/EuropeIsoscape.rda") +if (file.exists("output/EuropeIsoscape.rds")) { + EuropeIsoscape <- readRDS("output/EuropeIsoscape.rds") } else { EuropeIsoscape <- isoscape(raster = ElevEurope, isofit = EuropeFit) - if (raster::inMemory(EuropeIsoscape$isoscapes)) { - save(EuropeIsoscape, file = "output/EuropeIsoscape.rda", compress = "xz") - } + saveRDS(EuropeIsoscape, file = "output/EuropeIsoscape.rds", compress = "xz") } ``` diff --git a/bookdown/Preface.Rmd b/bookdown/Preface.Rmd index 02492dd..224671f 100644 --- a/bookdown/Preface.Rmd +++ b/bookdown/Preface.Rmd @@ -9,29 +9,29 @@ That way, we can include more content, display better pictures and update the do ## What is IsoriX? IsoriX is an R package that can be used for building isoscapes and inferring the geographic origin of organisms based on their isotopic signature [@IsoriX]. -This package employs a new statistical framework for doing all of this which is based on mixed models (GLMM). +This package employs a new statistical framework based on mixed models (GLMM) for doing all of this. As most other packages dedicated to specific applications, IsoriX is essentially a simple interface to several other packages more general in scope. Specifically, it uses the package [**spaMM**](https://kimura.univ-montp2.fr/~rousset/spaMM.htm) for fitting and predicting isoscapes, and for performing the assignment. +The spatial data generated by IsoriX are organised into rasters via the package [**terra**](https://rspatial.github.io/terra/index.html) (since IsoriX version 0.9.1, and before via [**raster**](https://rspatial.org/raster/)). 
IsoriX also heavily relies on the package [**rasterVis**](https://oscarperpinan.github.io/rastervis/) for plotting the maps using the powerful [**lattice**](http://lattice.r-forge.r-project.org) visualization system. -Knowing these packages is not necessary to handle IsoriX but it certainly helps for advanced applications. +Knowing these packages is not necessary to handle IsoriX, but it certainly helps for advanced applications. ## Who are we? -The IsoriX core Team is so far constituted by: +The current IsoriX core Team includes: * Alexandre Courtiol * François Rousset - * Marie-Sophie Rohwaeder * Stephanie Kramer-Schadt Alex does the programming for IsoriX. François does the programming for spaMM so as to make IsoriX working always better. Alex and François are also the ones that have cooked up together the statistical framework behind IsoriX. -Marie helped with some of the programming during a short internship with Alex. -Stephanie is the person who initiated this project and who has co-supervised many students whose project relied on IsoriX. +Stephanie is the person who initiated this project and who has co-supervised many students whose projects relied on IsoriX. Alex and Stephanie are all based in the Leibniz Institute for Zoo and Wildlife Research in Berlin (Germany). François is based at the Institut des Sciences de l'Evolution in Montpellier (France). -We are indebted to other scientists who have helped us one way or another and the extended IsoriX family consists of all the authors list in the main IsoriX publication [@IsoriX]: + +We are indebted to other scientists who have helped us one way or another and the extended IsoriX family consists of all the authors listed in the main IsoriX publication [@IsoriX]: ```{r citation} print(citation(package = "IsoriX")[[1]], bibtex = FALSE) @@ -39,7 +39,7 @@ print(citation(package = "IsoriX")[[1]], bibtex = FALSE) ## Who are you? -We don't know all IsoriX users but we would love to! 
+We don't know all IsoriX users, but we would love to! For us it is important to know who uses IsoriX in order to best allocate our efforts and make IsoriX better for those who use it. So if you are thinking of using IsoriX, please subscribe to our [Google group (a mailing list)](https://groups.google.com/g/IsoriX) and feel free to write us a little message about your project. @@ -48,9 +48,9 @@ If it is not the case, you should read [_an Introduction to R_](https://cran.r-p We will also assume that you all know a little bit about stable isotopes and isoscapes. For the stats bits, we will also assume that you now Generalized Linear Models and hopefully a little bit about mixed models too. -## Wanna help? +## We need your help! -If you want to help us to make IsoriX great, there are plenty things you could do irrespective of your knownledge and skills. Please check chapter \@ref(contrib) for details. +IsoriX is not the main research topic of any member of the IsoriX core Team, nor the main tool we use. We developed it so as to help our colleagues and others, but we cannot dedicate a huge amount of time to it. So, we would greatly appreciate any help to improve IsoriX and/or its documentation. There are plenty of things you could do to help us, irrespective of your knowledge and skills. Please check chapter \@ref(contrib) for details, or contact us using our [mailing list](https://groups.google.com/g/IsoriX). ## Acknowledgements diff --git a/bookdown/README.md b/bookdown/README.md index 93cb4e3..aad5ab4 100644 --- a/bookdown/README.md +++ b/bookdown/README.md @@ -2,19 +2,21 @@ This is the development folder for the documentation about IsoriX. -You can find the compiled (ie. readable) version of this documentation [here](https://bookdown.org/content/782/). +You can find the compiled (i.e., readable) version of this documentation [here](https://bookdown.org/content/782/). ## Notes for developers This bookdown follows the model merge and knit, rather than knit and merge. 
As a consequence, do not attempt to knit individual chapters, it won't work. Instead, you must render the whole thing at once. + Before doing that, do make sure to upload all your packages and to delete the content of the folder "output"; otherwise the bookdown will use objects created with old versions of packages. + Do make sure however that the folder "output" exists, otherwise objects won't be saved. ### Workflow for preparing an update -Make sure that you have the following packages installed (but no need to load them): rsconnect, servr. +Make sure that you have the following packages installed (but no need to load them): **rsconnect**, **servr**. Then, render the full book once, so as to create all the content stored in the folder "output". @@ -51,11 +53,11 @@ Check the bookdown online. If some steps are particularly slow, instead of relying on knitr cashing, it is best to store the created objects in the folder "output" and to use code similar to this: ```r -if (file.exists("output/some_object_slow_to_create.rda")) { - load("output/some_object_slow_to_create.rda") +if (file.exists("output/some_object_slow_to_create.rds")) { + some_object_slow_to_create <- readRDS("output/some_object_slow_to_create.rds") } else { some_object_slow_to_create <- slow_fn() - save(some_object_slow_to_create, file = "output/some_object_slow_to_create.rda", compress = "xz") + saveRDS(some_object_slow_to_create, file = "output/some_object_slow_to_create.rds", compress = "xz") } ``` diff --git a/bookdown/index.Rmd b/bookdown/index.Rmd index 4a20f02..d082c68 100644 --- a/bookdown/index.Rmd +++ b/bookdown/index.Rmd @@ -55,28 +55,38 @@ It is a work in progress, but it already contains plenty of material that should ```{r image_intro, echo = FALSE, dev = 'CairoPNG', animation.hook = 'gifski', interval = 3, fig.with = 6, cache = FALSE} ## for the following to work several chapters must be compiled! 
-if (file.exists("output/EuropeIsoscape.rda") & - file.exists("output/CalibBats.rda") & - file.exists("output/AssignedBats.rda")) { - load("output/EuropeIsoscape.rda") - load("output/CalibBats.rda") - load("output/AssignedBats.rda") + +if (file.exists("output/EuropeIsoscape.rds") & + file.exists("output/CalibBats.rds") & + file.exists("output/AssignedBats.rds")) { + + EuropeIsoscape <- readRDS("output/EuropeIsoscape.rds") + CalibBats <- readRDS("output/CalibBats.rds") + AssignedBats <- readRDS("output/AssignedBats.rds") + plot(EuropeIsoscape) + plot(EuropeIsoscape, sources = list(pch = 3, col = "orange"), borders = list(col = "white"), mask = list(fill = "darkgrey"), palette = list(range = c(-130, 10), step = 1, n_labels = 10, fn = "rainbow")) + plot(EuropeIsoscape, title = "H Isoscape", sources = list(draw = FALSE), borders = list(borders = NULL), mask = list(fill = "black"), palette = list(range = c(-130, 20), step = 30, fn = NULL)) + plot(EuropeIsoscape, which = "mean_predVar") + plot(EuropeIsoscape, which = "mean_residVar") + plot(CalibBats) + plot(AssignedBats, who = "Nnoc_15") + plot(AssignedBats, who = 1:4, sources = list(draw = FALSE), calibs = list(draw = FALSE),