├── .github ├── .gitignore └── workflows │ └── deploy_bookdown.yml ├── .gitignore ├── .travis.yml ├── 00-course-preparation.Rmd ├── 01-Preprocessing.Rmd ├── 02-standardisation.Rmd ├── 03-error-checking.Rmd ├── 04-analysis_covariates.Rmd ├── 05-exploration.Rmd ├── 06-community_metrics.Rmd ├── 07-habitat_use.Rmd ├── 08-occupancy.Rmd ├── 09-activity.Rmd ├── 10-density.Rmd ├── 11-behaviour.Rmd ├── 12-on_your_own.Rmd ├── 99-references.Rmd ├── DESCRIPTION ├── Dockerfile ├── LICENSE ├── README.md ├── WildCo_Data_Analysis.Rproj ├── _bookdown.yml ├── _build.sh ├── _deploy.sh ├── _output.yml ├── book.bib ├── data └── raw_data │ ├── AlgarRestorationProject_raw_species_list.csv │ ├── Los_Amigos_Camera_Trapping_raw_species_list.csv │ ├── example_covariates │ ├── LCC_codes.csv │ ├── example_dataframe.csv │ ├── example_raster.tif │ └── example_speed.csv │ ├── example_data │ ├── cameras.csv │ ├── common_names.csv │ ├── deployments.csv │ ├── images.csv │ └── projects.csv │ └── your_data │ ├── cameras.csv │ ├── deployments.csv │ ├── images.csv │ └── projects.csv ├── docs ├── 404.html ├── Intro-to-data-analysis_files │ └── figure-html │ │ ├── ch4_17-1.png │ │ ├── ch4_18-1.png │ │ ├── ch4_20-1.png │ │ ├── ch4_22-1.png │ │ ├── ch4_24-1.png │ │ ├── ch4_29-1.png │ │ ├── ch4_30-1.png │ │ ├── ch4_33-1.png │ │ ├── ch4_54-1.png │ │ ├── ch4_59-1.png │ │ ├── ch4_60-1.png │ │ ├── ch5_14-1.png │ │ ├── ch5_15-1.png │ │ ├── ch5_15-2.png │ │ ├── ch5_15-3.png │ │ ├── ch5_15-4.png │ │ ├── ch5_17-1.png │ │ ├── ch5_21-1.png │ │ ├── ch5_22-1.png │ │ ├── ch5_23-1.png │ │ ├── ch5_25-1.png │ │ ├── ch5_26-1.png │ │ ├── ch6_14-1.png │ │ ├── ch6_16-1.png │ │ ├── ch6_19-1.png │ │ ├── ch6_21-1.png │ │ ├── ch6_21b-1.png │ │ ├── ch6_22-1.png │ │ ├── ch6_23-1.png │ │ ├── ch6_25-1.png │ │ ├── ch7_12-1.png │ │ ├── ch7_13-1.png │ │ ├── ch7_16-1.png │ │ ├── ch7_22-1.png │ │ ├── ch7_28-1.png │ │ ├── ch7_29-1.png │ │ ├── ch7_40-1.png │ │ ├── ch7_41-1.png │ │ ├── ch7_5-1.png │ │ ├── ch7_9-1.png │ │ ├── ch8_20-1.png │ │ ├── ch9_10-1.png │ │ ├── ch9_11-1.png │ │ ├── ch9_15-1.png │ │ ├── ch9_7-1.png │ │ ├── ch9_8-1.png │ │ ├── unnamed-chunk-19-1.png │ │ ├── unnamed-chunk-20-1.png │ │ ├── unnamed-chunk-22-1.png │ │ ├── unnamed-chunk-23-1.png │ │ ├── unnamed-chunk-25-1.png │ │ ├── unnamed-chunk-26-1.png │ │ ├── unnamed-chunk-29-1.png │ │ ├── unnamed-chunk-3-1.png │ │ └── unnamed-chunk-6-1.png ├── activity.html ├── behavior.html ├── composition.html ├── covariates.html ├── data-creation.html ├── density.html ├── error-checking.html ├── exploration.html ├── habitat-use.html ├── images │ ├── Ninja.png │ ├── analysis_covariates │ │ └── rgee_check.PNG │ ├── community_metrics │ │ ├── AsyEst.PNG │ │ ├── coverage_based.PNG │ │ ├── data_format.PNG │ │ ├── data_info.PNG │ │ ├── size_based.PNG │ │ └── unit_based.png │ ├── course-preparation │ │ ├── New_directory.png │ │ ├── R_studio_start.PNG │ │ ├── click_download.png │ │ ├── copy.png │ │ ├── file_window.PNG │ │ ├── final_files.PNG │ │ ├── new_project.png │ │ ├── new_project_2.png │ │ ├── paste_files.PNG │ │ └── project_name.png │ ├── data_creation │ │ ├── cap_matrix.PNG │ │ ├── effort_lookup.PNG │ │ └── ind_dat.PNG │ ├── exploration │ │ └── project_your_locations.PNG │ └── preprocessing │ │ ├── folder_structure.PNG │ │ ├── function.png │ │ ├── human_blur.jpg │ │ ├── labelers.PNG │ │ └── renamer.png ├── index.html ├── libs │ ├── Proj4Leaflet-1.0.1 │ │ └── proj4leaflet.js │ ├── anchor-sections-1.1.0 │ │ ├── anchor-sections-hash.css │ │ ├── anchor-sections.css │ │ └── anchor-sections.js │ ├── bsTable-3.3.7 │ │ 
├── bootstrapTable.js │ │ └── bootstrapTable.min.css │ ├── clipboard-0.0.1 │ │ └── setClipboardText.js │ ├── crosstalk-1.2.0 │ │ ├── css │ │ │ └── crosstalk.min.css │ │ └── js │ │ │ └── crosstalk.min.js │ ├── gitbook-2.6.7 │ │ ├── css │ │ │ ├── fontawesome │ │ │ │ └── fontawesome-webfont.ttf │ │ │ ├── plugin-bookdown.css │ │ │ ├── plugin-clipboard.css │ │ │ ├── plugin-fontsettings.css │ │ │ ├── plugin-highlight.css │ │ │ ├── plugin-search.css │ │ │ ├── plugin-table.css │ │ │ └── style.css │ │ └── js │ │ │ ├── app.min.js │ │ │ ├── clipboard.min.js │ │ │ ├── jquery.highlight.js │ │ │ ├── plugin-bookdown.js │ │ │ ├── plugin-clipboard.js │ │ │ ├── plugin-fontsettings.js │ │ │ ├── plugin-search.js │ │ │ └── plugin-sharing.js │ ├── htmlwidgets-1.5.4 │ │ └── htmlwidgets.js │ ├── htmlwidgets-1.6.2 │ │ └── htmlwidgets.js │ ├── jquery-3.6.0 │ │ └── jquery-3.6.0.min.js │ ├── kePrint-0.0.1 │ │ └── kePrint.js │ ├── leaflet-1.3.1 │ │ ├── images │ │ │ ├── layers-2x.png │ │ │ ├── layers.png │ │ │ ├── marker-icon-2x.png │ │ │ ├── marker-icon.png │ │ │ └── marker-shadow.png │ │ ├── leaflet.css │ │ └── leaflet.js │ ├── leaflet-binding-2.1.1 │ │ └── leaflet.js │ ├── leaflet-binding-2.1.2 │ │ └── leaflet.js │ ├── leaflet-binding-2.2.0 │ │ └── leaflet.js │ ├── leaflet-providers-1.13.0 │ │ └── leaflet-providers_1.13.0.js │ ├── leaflet-providers-1.9.0 │ │ └── leaflet-providers_1.9.0.js │ ├── leaflet-providers-plugin-2.1.1 │ │ └── leaflet-providers-plugin.js │ ├── leaflet-providers-plugin-2.1.2 │ │ └── leaflet-providers-plugin.js │ ├── leaflet-providers-plugin-2.2.0 │ │ └── leaflet-providers-plugin.js │ ├── leafletfix-1.0.0 │ │ └── leafletfix.css │ ├── lightable-0.0.1 │ │ └── lightable.css │ ├── plotly-binding-4.10.0 │ │ └── plotly.js │ ├── plotly-binding-4.10.1 │ │ └── plotly.js │ ├── plotly-binding-4.10.2 │ │ └── plotly.js │ ├── plotly-htmlwidgets-css-2.11.1 │ │ └── plotly-htmlwidgets.css │ ├── plotly-htmlwidgets-css-2.5.1 │ │ └── plotly-htmlwidgets.css │ ├── plotly-main-2.11.1 │ │ └── plotly-latest.min.js │ ├── plotly-main-2.5.1 │ │ └── plotly-latest.min.js │ ├── proj4-2.6.2 │ │ └── proj4.min.js │ ├── rstudio_leaflet-1.3.1 │ │ └── rstudio_leaflet.css │ └── typedarray-0.1 │ │ └── typedarray.min.js ├── occupancy.html ├── on-your-own-5.html ├── on-your-own-7.html ├── prep.html ├── preprocessing-and-labelling.html ├── reference-keys.txt ├── references.html ├── search_index.json ├── standard.html └── style.css ├── images ├── Ninja.png ├── analysis_covariates │ └── rgee_check.PNG ├── community_metrics │ ├── AsyEst.PNG │ ├── coverage_based.PNG │ ├── data_format.PNG │ ├── data_info.PNG │ ├── iNext_est.PNG │ ├── size_based.PNG │ ├── survey_unit.PNG │ └── unit_based.png ├── course-preparation │ ├── New_directory.png │ ├── R_studio_start.PNG │ ├── click_download.png │ ├── copy.png │ ├── extract.PNG │ ├── file_window.PNG │ ├── final_files.PNG │ ├── new_project.png │ ├── new_project_2.png │ ├── paste_files.PNG │ └── project_name.png ├── data_creation │ ├── cap_matrix.PNG │ ├── effort_lookup.PNG │ └── ind_dat.PNG ├── exploration │ └── project_your_locations.PNG └── preprocessing │ ├── folder_structure.PNG │ ├── function.png │ ├── human_blur.jpg │ ├── labelers.PNG │ └── renamer.png ├── index.Rmd ├── now.json ├── packages.bib ├── preamble.tex ├── style.css └── toc.css /.github/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /.github/workflows/deploy_bookdown.yml: 
-------------------------------------------------------------------------------- 1 | name: scheduleRenderbook 2 | on: 3 | push: 4 | 5 | 6 | jobs: 7 | bookdown: 8 | name: Render-Book 9 | runs-on: macos-latest # ubuntu-latest # macos USES 10 TIMES THE MINUTES 10 | steps: 11 | - uses: actions/checkout@v2 12 | # More detail here, https://github.com/r-lib/actions 13 | # It's possible to define R and pandoc version if desired 14 | - uses: r-lib/actions/setup-r@v2 15 | - uses: r-lib/actions/setup-pandoc@v2 16 | - name: Install TinyTeX 17 | uses: r-lib/actions/setup-tinytex@v2 18 | env: 19 | # install full prebuilt version 20 | TINYTEX_INSTALLER: TinyTeX 21 | - name: Install packages 22 | run: Rscript -e 'install.packages(c("rmarkdown","bookdown", 23 | "activity", 24 | "corrplot", 25 | "cowplot", 26 | "dplyr", 27 | "progress", 28 | "elevatr", 29 | "gfcanalysis", 30 | "ggplot2", 31 | "gridExtra", 32 | "iNEXT", 33 | "kableExtra", 34 | "Hmsc", 35 | "leaflet", 36 | "lme4", 37 | "lubridate", 38 | "magrittr", 39 | "MCMCvis", 40 | "modisfast", 41 | "osmdata", 42 | "pals", 43 | "plotly", 44 | "remotes", 45 | "rmarkdown", 46 | "sf", 47 | "spOccupancy", 48 | "stars", 49 | "stringr", 50 | "terra", 51 | "tibble", 52 | "tidyr", 53 | "unmarked", 54 | "viridis", 55 | "installr", 56 | "jtools", 57 | "vegan", 58 | "MuMIn", 59 | "usedist", 60 | "taxize"))' 61 | - name: Install traitdata 62 | run: Rscript -e "remotes::install_github('RS-eco/traitdata')" 63 | env: 64 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 65 | - name: Install remBoot 66 | run: Rscript -e 'remotes::install_github("arcaravaggi/remBoot")' 67 | env: 68 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 69 | - name: Install spaceNtime 70 | run: Rscript -e 'remotes::install_github("annam21/spaceNtime")' 71 | env: 72 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 73 | - name: Render Book 74 | run: Rscript -e 'bookdown::render_book("index.Rmd")' 75 | - uses: actions/upload-artifact@v4 76 | with: 77 | name: docs 78 | path: docs/ 79 | 80 | checkout-and-deploy: 81 | runs-on: ubuntu-latest 82 | needs: bookdown 83 | steps: 84 | - name: Checkout 85 | uses: actions/checkout@v4 86 | - name: Download artifact 87 | uses: actions/download-artifact@v4 88 | with: 89 | # Artifact name 90 | name: docs # optional 91 | # Destination path 92 | path: docs # optional 93 | - name: Deploy to GitHub Pages 94 | uses: Cecilapp/GitHub-Pages-deploy@v3 95 | env: 96 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 97 | with: 98 | build_dir: docs 99 | branch: gh-pages 100 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | _publish.R 5 | _book 6 | _bookdown_files 7 | rsconnect 8 | data/processed_data/ -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: r 2 | cache: packages 3 | pandoc_version: 2.9.2.1 4 | 5 | addons: 6 | apt: 7 | packages: 8 | - ghostscript 9 | 10 | before_script: 11 | - chmod +x ./_build.sh 12 | - chmod +x ./_deploy.sh 13 | 14 | script: 15 | - ./_build.sh 16 | - ./_deploy.sh 17 | -------------------------------------------------------------------------------- /01-Preprocessing.Rmd: -------------------------------------------------------------------------------- 1 | # Preprocessing and labelling 2 | 3 | Once you have deployed your camera traps and brought back your SD cards,
we have several steps we need to perform before we can start analyzing the data: 4 | 5 | - back up the data 6 | - pre-process the files 7 | - label the footage 8 | - update deployment end dates 9 | 10 | We summarise each step below and point to useful tools where necessary. 11 | 12 | ## Data storage 13 | 14 | The file structure of your data backups depends on the structure of your project. We use one of two different options, which each have their merits: 15 | 16 | *1) Location based* 17 | 18 | This is likely the most intuitive method if you are manually sorting data or using an image labeller (software to manage your camera data) which uses the location as the key organizing element. You would make a folder using the 'placename' (the unique location where a camera is deployed), then copy all of the data relating to that site within it (left). Note that if you had multiple camera deployments you would have nested folders with the 'deployment_id' as the name: 19 | 20 | ```{r ch1_1, echo=F, message=F, warning=F} 21 | knitr::include_graphics("images/preprocessing/folder_structure.PNG") 22 | ``` 23 | 24 | *2) Deployment based* 25 | 26 | Increasingly, camera trap management platforms are 'deployment' driven rather than location based (e.g. Wildlife Insights). In this instance, the images are placed within a folder named with the `deployment_id` (the unique code corresponding to that deployment), with all deployment folders typically kept within a single parent folder. 27 | 28 | In this scenario, we would have a folder called 'to upload' with all of the unique deployment folders within it. Then, once a folder has been uploaded to the platform, it is moved to an "uploaded" folder: 29 | 30 | Crucially - make redundant copies to ensure you do not lose data. We make both local and cloud-based copies of our data sets. 31 | 32 | ## Preprocessing 33 | 34 | The following steps represent optional elements to apply to your data. Whether you need them depends on your questions, the platform you are using to label your data, and the volume of images you will be processing. 35 | 36 | ### Renaming 37 | 38 | When a camera takes images, it applies sequential names which are duplicated across cameras (e.g. RCNX0001, RCNX0002 etc). In the future, if files are accidentally moved it would be difficult (if not impossible) to trace them back to their origin. One way to get around this is to rename every camera image with a unique code (e.g. placename_datetime), which will ensure that any line of data you generate can be traced back to an image, regardless of how it is stored. 39 | 40 | We have created a tool which can be applied to folders of images organised by location and deployment, to create unique codes for each image: [the WildCo Image Renamer](https://github.com/WildCoLab/WildCo_Image_Renamer). The repository has an example dataset which you can play around with to get familiar with the tool. 41 | 42 | ```{r ch1_2, echo=F, message=F, warning=F, out.width="75%"} 43 | knitr::include_graphics("images/preprocessing/renamer.png") 44 | ``` 45 | 46 | 47 | ### Automated Labelers 48 | 49 | Once you have backed up and renamed your images, you may want to process them with an Artificial Intelligence (AI) labeler.
Although they are pretty cool and `in vogue` right now, the decision to use one (or not) should be based on several points: 50 | 51 | **- The number of images you have to process** If you only have a small dataset (a few thousand images) it is likely easier to do it manually 52 | 53 | **- Whether there is an AI labeler validated for your study area and strata** Despite the claims of their authors, AI labelers are not perfect. If they haven't been validated in your survey location, then use extreme caution when applying them. For example, an AI algorithm developed for terrestrial camera trap data will likely not work well on an arboreal dataset. 54 | 55 | **- How much money you have** For AI labelers to run quickly, you may need some very expensive computer gear or cloud computing time. Do not assume that this is cheaper than manual labor! 56 | 57 | **- The resolution of the labels you require** AI labelers are getting pretty good at sifting out blank images, but they have a long way to go before they can reliably split ground squirrel species (*Urocitellus sp.*), or long-nosed armadillo species (*Dasypus sp.*)! 58 | 59 | For a very pragmatic and informed take on the current state of the art, see Saul Greenberg's [Automated Image Recognition for Wildlife Camera Traps: Making it Work for You.](https://wildcams.ca/site/assets/files/1389/2020-08-greenberg-imagerecognitioncameratraps_updated.pdf) report. 60 | 61 | One of the biggest players in the game is undoubtedly [Megadetector](https://github.com/microsoft/CameraTraps/blob/main/megadetector.md). Click the link for an overview of the machine learning model and how it might work for you. 62 | 63 | Finally, some platforms now have their own inbuilt labeling AI (e.g. Wildlife Insights), which is certainly much more accessible than developing your own. Our only advice is to be wary of the identifications they generate and always check your data (a.k.a. keep a human in the loop - at least for now). 64 | 65 | ### Sensitive images 66 | 67 | One of the benefits of AI labelers is that you can use them to remove sensitive information (such as people's identities) from images without ever looking at them. An example of this would be camera trapping in protected areas where it is not possible to ask every person if they are happy being photographed for science. Instead, we can use Megadetector (or another AI labeler) to tell us when a human is detected in an image, then blur the area of that photo to remove individually identifying information. Previously, researchers had to delete the human images to be compliant with privacy requirements - which throws away valuable data on human use. 68 | 69 | The WildCo lab has developed a tool to blur human images using Megadetector outputs: [WildCo_Face_Blur](https://github.com/WildCoLab/WildCo_Face_Blur). Click the link for details on how to use it. 70 | 71 | ```{r ch1_3, echo=F, message=F, warning=F} 72 | knitr::include_graphics("images/preprocessing/human_blur.jpg") 73 | ``` 74 | 75 | For a discussion of its application in a recreational ecology context see: 76 | 77 | [Fennell, Mitchell, Christopher Beirne, and A. Cole Burton. "Use of object detection in camera trap image identification: Assessing a method to rapidly and accurately classify human and animal detections for research and application in recreation ecology."
Global Ecology and Conservation 35 (2022): e02104.](https://www.sciencedirect.com/science/article/pii/S2351989422001068) 78 | 79 | 80 | ### Timelapse extraction 81 | 82 | Timelapse photographs can be critical for determining when cameras are functioning, particularly in low productivity environments where wildlife detections are rare. We highly recommend you take a photo at noon each day! They can also be used to generate site-level vegetation indices, such as NDVI, as they are taken at the same time every day. However, you likely don't want to sort through thousands of images of leaves and grass by hand, and you may want to extract the timelapse images to run through a different program (e.g. the `phenopix` package - see [the covariates chapter](#covariates)). 83 | 84 | To quickly extract timelapse images we developed some code which uses the metadata of the images to filter out timelapse photos from motion-detected photos. It is packaged up as part of the [WildCo_Image_renamer](https://github.com/WildCoLab/WildCo_Image_Renamer) script. 85 | 86 | ## Labelling 87 | 88 | ```{r ch1_4, echo=F, message=F, warning=F, out.width="100%"} 89 | knitr::include_graphics("images/preprocessing/labelers.PNG") 90 | ``` 91 | 92 | We often get asked what the best software/data platform is for labeling images... and **the pragmatic answer is that it does not matter as long as you export your data in a standardised format** (see [the data standardisation chapter](#standard)). The truth is that different projects have different needs: 93 | 94 | - If you have a poor internet connection you might need to use standalone offline software, such as Timelapse 95 | - Or if you work internationally with a large team of labelers who will tag images simultaneously, an online data platform, such as Wildlife Insights, might be essential 96 | 97 | Dan Morris has curated a fantastic list of currently available tools here: [Everything I know about machine learning and camera traps](https://agentmorris.github.io/camera-trap-ml-survey/). 98 | 99 | In a nutshell: 100 | 101 | **Data platforms** are web- and desktop-based tools used for efficient and standardized data management, sharing, and analysis of remote camera data. A number of platforms exist, so it is important that users choose the one best suited to their needs. To help camera trap users make this decision, [the Wildcam network](https://wildcams.ca) has developed a comparison of different camera data platforms. It provides an overview of platforms and software used in remote camera research in western Canada. As software and online tools are often subject to frequent updates and change, we recognize this as a document subject to change over time. [Click here](https://wildcams.ca/site/assets/files/1389/overviewofcamerawebsites_draft_2020-06-17.xlsx) to review the comparison (last updated June 2020). We welcome feedback at any time (info@wildcams.ca). 102 | 103 | **Software** programs specifically designed for camera trap photos and their associated data are now recognized as the best method for data processing. There are quite a few programs available for practitioners, but many of them share most of the same functionalities. The relatively few unique features that distinguish programs will help to determine what software to use, and the features needed will vary depending on the study design. See: 104 | 105 | [Wearn, O. R. and P. Glover-Kapfer. 2017. Camera-trapping for conservation: a guide to best-practices.
WWF conservation technology series 1.1 181.](https://www.researchgate.net/publication/320402776_Camera-trapping_for_conservation_a_guide_to_best-practices) 106 | 107 | [Young, S., J. Rode‐Margono and R. Amin. 2018. Software to facilitate and streamline camera trap data management: a review. Ecology and Evolution, 8: 9947-9957.](https://onlinelibrary.wiley.com/doi/full/10.1002/ece3.4464) 108 | 109 | ## End dates and outages 110 | 111 | It is very important to note that camera deployments do not end when you pick up the camera - they end when the camera **stops collecting comparable data**. The best time to record the date a camera stops functioning is probably when you are labeling images. Do not cut this corner! 112 | 113 | Below is the same camera station at two points in time. The data from these are not comparable - if a tree fell on you whilst you were out counting animals, you would probably count less effectively too! We would edit the deployment end date to reflect when it stopped recording comparable data (not all examples are as clear cut as this one). 114 | 115 | ```{r ch1_5, echo=F, message=F, warning=F, out.width="100%"} 116 | knitr::include_graphics("images/preprocessing/function.png") 117 | ``` 118 | 119 | -------------------------------------------------------------------------------- /02-standardisation.Rmd: -------------------------------------------------------------------------------- 1 | # Metadata standardisation {#standard} 2 | 3 | On their own, the images produced by camera traps are useless. We need to keep accurate records of how the data were collected, labelled, and manipulated if we are to achieve the goal of synthesizing data from multiple projects. This supporting information is metadata - simply, “data that provides information about other data”. 4 | 5 | The benefits of 'standardizing' the metadata associated with camera traps, or other sensors of biodiversity, are hopefully clear - it should facilitate the rapid and robust exploration, analysis and sharing of information on wildlife populations, ultimately resulting in more robust, repeatable, and timely research and management decisions. 6 | 7 | ```{r ch2_1, echo=F, results='hide', message =F, warning=F} 8 | # Check you have them and load them 9 | list.of.packages <- c("kableExtra", "tidyr") 10 | new.packages <- list.of.packages[!(list.of.packages %in% installed.packages()[,"Package"])] 11 | if(length(new.packages)) install.packages(new.packages) 12 | lapply(list.of.packages, require, character.only = TRUE) 13 | 14 | ``` 15 | 16 | ## The Wildlife Insights Minimum Metadata Standards 17 | 18 | The convention we use in this course is the [data standards used by Wildlife Insights](https://docs.google.com/spreadsheets/d/1Jg-WybmVeGlWGrbPpwuwJCgranOV1r3M_LrzELttfK0/edit#gid=412365965). 19 | 20 | Their standard format is composed of four different elements: 21 | 22 | - *Project data* `projects.csv` a dataframe containing key information about the project itself, e.g. how the cameras were deployed and what the target features were. 23 | - *Image data* `images.csv` a dataframe containing all of the information contained within each image. This information is typically added by humans, but increasingly we are using artificial intelligence to speed up this process.
24 | - *Deployment data* `deployments.csv` a dataframe listing the activity of the camera traps involved in your study, and any issues encountered during deployments which may influence their analysis 25 | - *Camera data* `cameras.csv` a dataframe listing all the cameras deployed in the project 26 | 27 | Below we give a quick summary and explanation of each. 28 | 29 | First, read in the data files: 30 | 31 | ```{r ch2_2, message=F, warning=F, class.source="Rinfo"} 32 | pro <- read.csv("data/raw_data/example_data/projects.csv", header=T) 33 | img <- read.csv("data/raw_data/example_data/images.csv", header=T) 34 | dep <- read.csv("data/raw_data/example_data/deployments.csv", header=T) 35 | cam <- read.csv("data/raw_data/example_data/cameras.csv", header=T) 36 | ``` 37 | 38 | Let's look at each one in turn. 39 | 40 | ### Project data 41 | 42 | The project file contains a general description of the project. It should give someone a helicopter overview of your project, and provide the data usage guidelines. 43 | 44 | ```{r project data, echo=F, message=F} 45 | t(pro) %>% 46 | kbl() %>% 47 | kable_styling(full_width = T) %>% 48 | column_spec(1, bold = T, border_right = T) 49 | ``` 50 | 51 | ### Image data 52 | 53 | This file contains the image labels - what is in each picture and its properties. Each image you have processed is linked to at least one row in the detection data. Multiple rows may exist if there are multiple species in a camera trap image, or if you are identifying multiple unique individuals. 54 | 55 | ```{r ch2_3, echo=F} 56 | kbl(head(img))%>% 57 | kable_paper() %>% 58 | scroll_box(width = "750px", height = "200px") 59 | ``` 60 | 61 | 62 | ### Deployment data 63 | 64 | This is the camera deployment data - where the deployment occurred, when it started, when it ended, and other relevant information about each unique deployment. 65 | 66 | ```{r ch2_4, echo=F} 67 | kbl(head(dep))%>% 68 | kable_paper() %>% 69 | scroll_box(width = "750px", height = "200px") 70 | 71 | ``` 72 | 73 | ### Camera inventory 74 | 75 | An inventory of all the cameras used in the project. Ideally, each camera would be represented in the deployment data. This technically isn't 100% necessary to analyse your dataset, although there are some scenarios where it might help. 76 | 77 | ```{r ch2_5, echo=F} 78 | kbl(cam)%>% 79 | kable_paper() %>% 80 | scroll_box(width = "750px", height = "200px") 81 | 82 | ``` 83 | 84 | ### Important note 85 | These are simply the minimum sheets you require - we derive a lot of other useful data frames when moving from raw camera data to analyzable camera data. See the [Creating analysis dataframes section](#data-creation) for further examples. 86 | 87 | 88 | **Further Reading** 89 | 90 | Forrester, T. et al. An open standard for camera trap data. Biodivers. Data J. 4, (2016). 91 | 92 | Meek, P. D., et al. "Recommended guiding principles for reporting on camera trapping research."
Biodiversity and conservation 23.9 (2014) 93 | 94 | [RISC Wildlife Camera Metadata Protocol](https://www2.gov.bc.ca/assets/gov/environment/natural-resource-stewardship/nr-laws-policy/risc/wcmp_v1.pdf) 95 | 96 | 97 | 98 | 99 | 100 | -------------------------------------------------------------------------------- /09-activity.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: html_document 3 | editor_options: 4 | chunk_output_type: console 5 | --- 6 | 7 | # Activity 8 | 9 | Given that camera traps operate 24 hours a day, 7 days a week, and can record animal motion down to second-level precision, they represent a powerful tool to explore and contrast the activity patterns of the species they detect! Such analyses can give insight into competition, predation and coexistence. Characterizing the "activity level" - the proportion of the day during which animals are active - is also increasingly important for new estimators of animal density ([see the density chapter for more info](#density)). Consequently, understanding how to derive and use activity data is very important for people using camera traps. 10 | 11 | *Must read* [Frey, Sandra, et al. "Investigating animal activity patterns and temporal niche partitioning using camera‐trap data: Challenges and opportunities." Remote Sensing in Ecology and Conservation 3.3 (2017): 123-132.](https://zslpublications.onlinelibrary.wiley.com/doi/full/10.1002/rse2.60) 12 | 13 | There are two key packages: 14 | 15 | - `overlap` 16 | - `activity` 17 | 18 | They each use the timestamps in camera trap detections to derive activity indices which can be compared between different strata of interest (e.g. species, treatments etc.). 19 | 20 | Here we will use the `activity` package. 21 | 22 | ## Independent detections or raw data? 23 | 24 | A recent paper has highlighted that we need to carefully consider our data source for activity analyses: 25 | 26 | [Christopher Peral, Marietjie Landman, Graham I. H. Kerley. The inappropriate use of time-to-independence biases estimates of activity patterns of free-ranging mammals derived from camera traps. Ecology and Evolution](https://onlinelibrary.wiley.com/doi/10.1002/ece3.9408?af=R) 27 | 28 | Whilst we typically use "independent data" for most of our camera trap analysis, doing so may throw away useful data on activity, both in terms of the number of data points (power) and the activity patterns they generate. Peral et al. show that 70% of papers published to date use independent data to derive their indices. They actually state: **"We conclude that the application of time-to-independence data filters in camera trap-based estimates of activity patterns is not valid and should not be used."** 29 | 30 | So we will use the raw data to derive our indices! 31 | 32 | **Load your packages** 33 | 34 | ```{r ch9_1, echo=T, results='hide', message =F, warning=F, class.source="Rmain"} 35 | # Check you have them and load them 36 | list.of.packages <- c("kableExtra", "tidyr", "ggplot2", "gridExtra", "activity", "overlap", "dplyr","lubridate" ) 37 | 38 | new.packages <- list.of.packages[!(list.of.packages %in% installed.packages()[,"Package"])] 39 | if(length(new.packages)) install.packages(new.packages) 40 | lapply(list.of.packages, require, character.only = TRUE) 41 | 42 | ``` 43 | 44 | ## Data formatting 45 | 46 | First, let's import the processed raw data file.
47 | 48 | ```{r ch9_2, class.source="Rmain"} 49 | # Import the data 50 | img <- read.csv("data/processed_data/AlgarRestorationProject_raw_detections.csv", header=T) 51 | ``` 52 | 53 | Which looks like this: 54 | 55 | ```{r ch9_3, echo=F} 56 | kbl(head(img))%>% 57 | kable_paper() %>% 58 | scroll_box(height = "200px") 59 | ``` 60 | 61 | Then load the activity package: 62 | 63 | ```{r ch9_4, class.source="Rmain"} 64 | # Load the package 65 | library(activity) 66 | ``` 67 | 68 | If your cameras correct for daylight savings, use the appropriate timezone code; if they do not, use UTC. 69 | 70 | ```{r ch9_5, results="hide", class.source="Rmain"} 71 | img$timestamp <- ymd_hms(img$timestamp, tz="UTC") 72 | ``` 73 | 74 | Note - find your timezone code for the `tz=` call [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). 75 | 76 | ### Accounting for sunrise and sunset 77 | 78 | A recent paper highlighted the challenges in trying to understand animal activity patterns at high latitudes - as sunrise/sunset timings vary substantially through the calendar year. See: 79 | 80 | [Vazquez, Carmen, et al. "Comparing diel activity patterns of wildlife across latitudes and seasons: Time transformations using day length." Methods in Ecology and Evolution 10.12 (2019): 2057-2066.](https://besjournals.onlinelibrary.wiley.com/doi/full/10.1111/2041-210X.13290) 81 | 82 | If we want to compare activity patterns between two different locations, or different seasons, the day length at the time the detection occurred can have a huge impact on our estimates of wildlife activity. For example, if we wanted to compare day/night activity between winter and summer periods, in winter animal activity is constrained to a much shorter day length. 83 | 84 | Fortunately, the authors have a solution! 85 | 86 | **The average anchoring method** 87 | 88 | Instead of using the 'human' 24h clock, we can express animal activity relative to an important anchor point in the day (e.g. sunrise). 89 | 90 | NOTE - the transformation is not necessary at latitudes below 20°, or in studies with a duration of less than a month (below 40° latitude), as day length doesn't change substantially. 91 | 92 | ```{r ch9_6, class.source="Rmain", results='hide', message =F, warning=F} 93 | # We need to add latitude and longitude to our observations 94 | # import our station locations (and other covariates) 95 | locs <- read.csv("data/processed_data/AlgarRestorationProject_camera_locations_and_covariates.csv") 96 | 97 | # Add them to our data frame 98 | img_locs <- left_join(img, locs) 99 | 100 | # calculate solar time 101 | tmp <- solartime ( img_locs$timestamp, # the date time column 102 | img_locs$latitude, # Latitude 103 | img_locs$longitude, # Longitude 104 | tz=-6, # an offset in numeric hours to UTC (Alberta is 6 hours behind) 105 | format="%Y-%m-%d %H:%M:%S") 106 | 107 | # Although we want to use solar time, let's add both in case you want to explore the implications 108 | img_locs$solar <- tmp$solar 109 | img_locs$clock <- tmp$clock 110 | 111 | ``` 112 | 113 | Let's check out the relationship between these two indices: 114 | 115 | ```{r ch9_7, class.source="Rinfo"} 116 | plot(img_locs$clock, img_locs$solar) 117 | ``` 118 | 119 | We are now ready to fit some models! 120 | 121 | ## Species comparisons 122 | 123 | Let's start with a white-tailed deer and caribou example. 124 | 125 | Note we are reducing the number of replicates to 100 to speed up the process - typically people use 1000.
127 | ```{r ch9_8, class.source="Rmain"} 128 | # Fit an activity model 129 | m1 <- fitact(img_locs$solar[img_locs$sp=="Odocoileus.virginianus"], sample="model", reps=100) 130 | plot(m1) 131 | ``` 132 | 133 | Take a look at the raw data if you want. 134 | 135 | ```{r ch9_9, eval=FALSE, class.source="Rinfo"} 136 | m1 137 | ``` 138 | 139 | And repeat it for caribou: 140 | 141 | ```{r ch9_10, class.source="Rmain"} 142 | # Fit an activity model 143 | m2 <- fitact(img_locs$solar[img_locs$sp=="Rangifer.tarandus"], sample="model", reps=100) 144 | plot(m2) 145 | ``` 146 | 147 | We can plot both on the same axis as follows: 148 | 149 | ```{r ch9_11, class.source="Rmain"} 150 | plot(m2, yunit="density", data="none", las=1, lwd=2, 151 | tline=list(lwd=2), # Thick line 152 | cline=list(lty=0)) # Suppress confidence intervals 153 | 154 | plot(m1, yunit="density", data="none", add=TRUE, 155 | tline=list(col="red", lwd=2), 156 | cline=list(lty=0)) 157 | 158 | legend("topright", c("Caribou", "Deer"), col=1:2, lty=1, lwd=2) 159 | ``` 160 | 161 | We can compare different activity patterns using the coefficient of overlap (∆) - developed by Ridout and Linkie: 162 | 163 | [Ridout, Martin S., and Matthew Linkie. "Estimating overlap of daily activity patterns from camera trap data." Journal of Agricultural, Biological, and Environmental Statistics 14.3 (2009): 322-337.](https://link.springer.com/article/10.1198/jabes.2009.08038) 164 | 165 | The coefficient ranges from 0 (no overlap) to 1 (complete overlap). We can implement it for a two-species comparison as follows: 166 | 167 | ```{r ch9_12, class.source="Rmain"} 168 | # Note reps reduced to speed up running time - people typically use 1000. 169 | compareCkern(m1, m2, reps = 100) 170 | ``` 171 | 172 | The output above represents the following: 173 | 174 | (Remember: 0 = no overlap and 1 = complete overlap.) 175 | 176 | - `obs` = observed overlap index; 177 | - `null` = mean null overlap index; 178 | - `seNull` = standard error of the null distribution; 179 | - `pNull` = probability observed index arose by chance. 180 | 181 | This suggests there is reasonably high overlap between the two species - and that it did not come about by chance. 182 | 183 | ## Treatment comparisons 184 | 185 | We can also compare patterns within a species across different strata of interest. For example, perhaps white-tailed deer change their activity patterns in response to the `feature_type` they are using - perhaps they will be more nocturnal on `HumanUse` lines relative to `Offline` strata.
Let's try it: 186 | 187 | **White-tail deer on HumanUse feature** 188 | 189 | ```{r ch9_13, class.source="Rmain"} 190 | # Fit an activity model 191 | m1 <- fitact(img_locs$solar[img_locs$sp=="Odocoileus.virginianus" & 192 | img_locs$feature_type=="HumanUse"], sample="model", reps=100) 193 | ``` 194 | 195 | **White-tail deer on Offline feature** 196 | 197 | ```{r ch9_14, class.source="Rmain"} 198 | m2 <- fitact(img_locs$solar[img_locs$sp=="Odocoileus.virginianus" & 199 | img_locs$feature_type=="Offline"], sample="model", reps=100) 200 | ``` 201 | 202 | Plot the results: 203 | 204 | ```{r ch9_15, class.source="Rmain"} 205 | 206 | plot(m2, yunit="density", data="none", las=1, lwd=2, 207 | tline=list(lwd=2), # Thick line 208 | cline=list(lty=0)) # Suppress confidence intervals 209 | 210 | plot(m1, yunit="density", data="none", add=TRUE, 211 | tline=list(col="red", lwd=2), 212 | cline=list(lty=0)) 213 | 214 | legend("topright", c("Offline", "HumanUse"), col=1:2, lty=1, lwd=2) 215 | ``` 216 | 217 | And perform the statistical test: 218 | 219 | ```{r ch9_16, class.source="Rmain"} 220 | # Note reps reduced to speed up running time - people typically use 1000. 221 | compareCkern(m1, m2, reps = 100) 222 | ``` 223 | 224 | There is very high overlap for these comparisons, and it is unlikely to have arisen by chance! So it seems the evidence for changes in temporal activity in response to feature_type is weak - at least for the white-tailed deer! 225 | 226 | 227 | ## On your own 228 | 229 | Try your own species comparisons. Remember we have the following species: 230 | 231 | ```{r ch9_17, echo=F} 232 | unique(img_locs$sp) 233 | ``` 234 | 235 | You can also try other categorical strata comparisons; we have: 236 | 237 | `feature_type` 238 | 239 | ```{r ch9_18, echo=F} 240 | table(locs$feature_type) 241 | ``` 242 | 243 | ## Selected further reading 244 | 245 | [Houngbégnon, Fructueux GA, et al. "Daily Activity Patterns and Co-Occurrence of Duikers Revealed by an Intensive Camera Trap Survey across Central African Rainforests." Animals 10.12 (2020): 2200.](https://pubmed.ncbi.nlm.nih.gov/33255400/) 246 | 247 | [Ross J, Hearn AJ, Johnson PJ, Macdonald DW (2013). Activity patterns and temporal avoidance by prey in response to Sunda clouded leopard predation risk. Journal of Zoology, 290(2), 96-106.](https://zslpublications.onlinelibrary.wiley.com/doi/10.1111/jzo.12018) 248 | 249 | [Azevedo FC, Lemos FG, Freitas-Junior MC, Rocha DG, Azevedo FCC (2018). Puma activity patterns and temporal overlap with prey in a human-modified landscape at Southeastern Brazil. Journal of Zoology](https://zslpublications.onlinelibrary.wiley.com/doi/abs/10.1111/jzo.12558) 250 | -------------------------------------------------------------------------------- /11-behaviour.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: html_document 3 | editor_options: 4 | chunk_output_type: console 5 | --- 6 | # Behavior {#behavior} 7 | 8 | Camera traps are being used in increasingly creative ways to understand species behaviours, including [activity patterns](#activity), foraging tactics, social interactions and predation. Here we will summarise some of the current approaches, and give a quick example using event duration in our example dataset.
9 | 10 | Camera traps are thought to have some advantages over studies which directly observe animals in the wild, including reduced presence in the field (hopefully leading to the capture of "more natural" behaviors), the ability to be deployed in high numbers and for long periods of time, and the potential to capture standardised observations of behaviour comparable across multiple studies. 11 | 12 | See [Caravaggi, A., et al. "A review of factors to consider when using camera traps to study animal behavior to inform wildlife ecology and conservation. Conservat Sci and Prac. 2020; 2." (2020).](https://conbio.onlinelibrary.wiley.com/doi/10.1111/csp2.239) for a balanced consideration of the benefits and potential pitfalls of using cameras for behaviour studies. 13 | 14 | ## Behavioural designations 15 | 16 | Vigilant vs. non-vigilant 17 | 18 | Example: [Schuttler, Stephanie G., et al. "Deer on the lookout: how hunting, hiking and coyotes affect white‐tailed deer vigilance." Journal of Zoology 301.4 (2017): 320-327](https://www.researchgate.net/publication/310666089_Deer_on_the_lookout_How_hunting_hiking_and_coyotes_affect_white-tailed_deer_vigilance) 19 | 20 | ## Event duration 21 | 22 | One behavior parameter which is simple to derive from existing camera datasets is the length of the event - which, put simply, is the interval between the start of a detection event and the end. For this value to be meaningful, it is important that the cameras are on 'motion' trigger (as opposed to time lapse) and that the quiet period between detections is very short - so that we have a good idea of when the animal arrived and departed from the frame. 23 | 24 | What this 'event duration' means very much depends on the context of your study region. For example, if some of your cameras are located in rugged and dense terrain, whereas others are in wide open habitat, 'event duration' could simply represent the resistance to movement of the habitat. However, if your stations are situated in locations which are very similar in ruggedness or vegetation type, and simply differ in terms of some other experimental manipulation - then event duration could mean something very different. 25 | 26 | Our Wildlife Coexistence Laboratory recently published a paper using 'event duration' to explore the responses of ungulates to predation risk: 27 | 28 | [Burton, A. Cole, et al. "Behavioral “bycatch” from camera trap surveys yields insights on prey responses to human‐mediated predation risk." Ecology and evolution 12.7 (2022): e9108.](https://onlinelibrary.wiley.com/doi/pdf/10.1002/ece3.9108) 29 | 30 | Typically when we analyse camera trap data we analyse it in units of site_time - for example, the number of detections per week. However, in this instance we will be analyzing the individual detection events.
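The boxplots below are exploratory. If you then want to formally test whether event durations differ between strata, one simple option is to model the (logged) event duration as a function of the strata of interest. The minimal sketch below is illustrative only and is not evaluated in this chapter - it assumes the `sub_ind` dataframe created later in this section, and the offset of one second is needed because single-image events have a duration of zero:

```{r ch10_0, eval=FALSE}
# A minimal, unevaluated sketch of a formal comparison of event durations
# (assumes the sub_ind dataframe built later in this section)

# Single-image events have event_duration == 0, so add 1 second before logging
m_dur <- lm(log(event_duration + 1) ~ feature_type, data = sub_ind)
summary(m_dur)

# With repeated events at the same station, a mixed model may be more appropriate, e.g.:
# lme4::lmer(log(event_duration + 1) ~ feature_type + (1 | placename), data = sub_ind)
```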
**load the packages** 33 | 34 | ```{r, echo=F, results='hide', message =F, warning=F, eval=T} 35 | # Check you have them and load them 36 | list.of.packages <- c("kableExtra", "tidyr", "ggplot2", "gridExtra", "phenopix", "plotly") 37 | 38 | new.packages <- list.of.packages[!(list.of.packages %in% installed.packages()[,"Package"])] 39 | if(length(new.packages)) install.packages(new.packages) 40 | lapply(list.of.packages, require, character.only = TRUE) 41 | 42 | ``` 43 | 44 | Let's read in the independent detection data for this project: 45 | 46 | ```{r ch10_1} 47 | ind <- read.csv("data/processed_data/AlgarRestorationProject_30min_independent_detections.csv") 48 | ``` 49 | 50 | And the location information: 51 | 52 | ```{r ch10_2} 53 | locs <- read.csv("data/processed_data/AlgarRestorationProject_camera_locations_and_covariates.csv") 54 | ``` 55 | 56 | 57 | When we created this dataset, we calculated a parameter called `event_duration` - this reflects the interval in seconds between the first detection in an independent event, and the last. 58 | 59 | Let's look at the distribution of independent events across all of our species: 60 | 61 | ```{r ch10_3} 62 | fig <- plot_ly(data=ind,y = ~event_duration, type = "box",split = ~sp) 63 | fig 64 | ``` 65 | 66 | Let's simplify things and explore the data for a single species at two of the feature_types (treatment strata): 67 | 68 | ```{r ch10_4} 69 | sub_locs <- locs$placename[locs$feature_type %in% c("HumanUse", "Offline")] 70 | 71 | sub_ind <- ind[ind$sp=="Odocoileus.virginianus" & ind$placename %in% sub_locs,] 72 | 73 | sub_ind <- left_join(sub_ind, locs) 74 | 75 | ``` 76 | 77 | Let's check out that boxplot again! 78 | 79 | ```{r ch10_5} 80 | fig <- plot_ly(data=sub_ind,y = ~event_duration, type = "box",split = ~feature_type) 81 | fig 82 | ``` 83 | 84 | It looks like deer may linger longer in offline areas. Let's log the response variable to see if we can make that clearer: 85 | 86 | ```{r ch10_6} 87 | fig <- plot_ly(data=sub_ind,y = ~log(event_duration), type = "box",split = ~feature_type) 88 | fig 89 | ``` 90 | 91 | 92 | ## Animal speed and day range 93 | 94 | [Palencia, Pablo, et al. "Innovations in movement and behavioural ecology from camera traps: day range as model parameter." Methods in Ecology and Evolution.](https://besjournals.onlinelibrary.wiley.com/doi/abs/10.1111/2041-210X.13609) 95 | 96 | ## Interactions 97 | 98 | [Niedballa, Jürgen, et al. "Assessing analytical methods for detecting spatiotemporal interactions between species from camera trapping data." Remote Sensing in Ecology and Conservation 5.3 (2019): 272-285.](https://zslpublications.onlinelibrary.wiley.com/doi/pdf/10.1002/rse2.107) 99 | 100 | ```{r} 101 | # 102 | "One approach estimates spatiotemporal avoidance, that is, to what extent site visitation by species A (the 'primary' species, hereafter) influences subsequent visitations by species B (the 'secondary' species, hereafter, e.g. Harmsen et al. 2009; Parsons et al. 2016; Karanth et al. 2017). Such avoidance behaviour can be mediated by olfactory (Apfelbach et al. 2005; Ferrero et al. 2011), visual (Blumstein et al. 2000; Stankowich and Coss 2007) or acoustic cues (Hauser and Wrangham 1990). The second, more commonly used approach assesses temporal segregation between species.
Here, the temporal overlap in activity between two species is estimated to assess whether daily activity patterns may have shifted in response to the presence of the other species (Ridout and Linkie 2009; Linkie and Ridout 2011; Foster et al. 2013; Lynam et al. 2013; Ross et al. 2013; Farris et al. 2015; Sunarto et al. 2015). Often, camera trap stations are pooled for this analysis, thus omitting spatial information." 102 | 103 | 104 | 105 | # See file on desktop for code 106 | 107 | ``` 108 | 109 | ### One species as a predictor of another 110 | 111 | Tattersall, E. R., Burgar, J. M., Fisher, J. T. & Burton, A. C. Boreal predator co-occurrences reveal shared use of seismic lines in a working landscape. Ecol. Evol. 10, 1678–1691 (2020). 112 | 113 | ### Residual co-occurrence models 114 | See: 115 | 116 | Ovaskainen, O. et al. How to make more out of community data? A conceptual framework and its implementation as models and software. Ecol. Lett. 20, 561–576 (2017). 117 | 118 | Linear models: Tikhonov, G. et al. Joint species distribution modelling with the r-package Hmsc. Methods Ecol. Evol. 11, 442–447 (2020). 119 | 120 | Occupancy: Tobler, Mathias W., et al. "Joint species distribution models with species correlations and imperfect detection." Ecology 100.8 (2019): e02754. 121 | 122 | ### Attractance-Avoidance Ratios (AAR) 123 | 124 | For a given species of interest (e.g. humans) we can record the period of time until the next detection of an animal species of interest, as well as the period of time since it was last detected before the human detection. The ratio between these times is known as the attractance-avoidance ratio. 125 | 126 | See: 127 | [Parsons, A. W., et al. "The ecological impact of humans and dogs on wildlife in protected areas in eastern North America." Biological Conservation 203 (2016): 75-88.](https://www.sciencedirect.com/science/article/abs/pii/S0006320716303603) 128 | 129 | [Naidoo, R. & Burton, A. C. Relative effects of recreational activities on a temperate terrestrial wildlife assemblage. Conserv. Sci. Pract. (2020)](https://conbio.onlinelibrary.wiley.com/doi/epdf/10.1111/csp2.271) 130 | 131 | [Niedballa, Jürgen, et al. "Assessing analytical methods for detecting spatiotemporal interactions between species from camera trapping data." Remote Sensing in Ecology and Conservation 5.3 (2019): 272-285.](https://zslpublications.onlinelibrary.wiley.com/doi/pdf/10.1002/rse2.107) 132 | 133 | -------------------------------------------------------------------------------- /99-references.Rmd: -------------------------------------------------------------------------------- 1 | `r if (knitr:::is_html_output()) ' 2 | # References {-} 3 | '` 4 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: placeholder 2 | Type: Book 3 | Title: Does not matter. 4 | Version: 0.0.1 5 | Imports: bookdown 6 | Remotes: rstudio/bookdown 7 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM conoria/alpine-r-bookdown 2 | 3 | WORKDIR /usr/src 4 | 5 | COPY . .
6 | 7 | RUN R -q -e 'bookdown::render_book("index.Rmd", "bookdown::gitbook")' && mv _book /public 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | CC0 1.0 Universal 2 | 3 | Statement of Purpose 4 | 5 | The laws of most jurisdictions throughout the world automatically confer 6 | exclusive Copyright and Related Rights (defined below) upon the creator and 7 | subsequent owner(s) (each and all, an "owner") of an original work of 8 | authorship and/or a database (each, a "Work"). 9 | 10 | Certain owners wish to permanently relinquish those rights to a Work for the 11 | purpose of contributing to a commons of creative, cultural and scientific 12 | works ("Commons") that the public can reliably and without fear of later 13 | claims of infringement build upon, modify, incorporate in other works, reuse 14 | and redistribute as freely as possible in any form whatsoever and for any 15 | purposes, including without limitation commercial purposes. These owners may 16 | contribute to the Commons to promote the ideal of a free culture and the 17 | further production of creative, cultural and scientific works, or to gain 18 | reputation or greater distribution for their Work in part through the use and 19 | efforts of others. 20 | 21 | For these and/or other purposes and motivations, and without any expectation 22 | of additional consideration or compensation, the person associating CC0 with a 23 | Work (the "Affirmer"), to the extent that he or she is an owner of Copyright 24 | and Related Rights in the Work, voluntarily elects to apply CC0 to the Work 25 | and publicly distribute the Work under its terms, with knowledge of his or her 26 | Copyright and Related Rights in the Work and the meaning and intended legal 27 | effect of CC0 on those rights. 28 | 29 | 1. Copyright and Related Rights. A Work made available under CC0 may be 30 | protected by copyright and related or neighboring rights ("Copyright and 31 | Related Rights"). Copyright and Related Rights include, but are not limited 32 | to, the following: 33 | 34 | i. the right to reproduce, adapt, distribute, perform, display, communicate, 35 | and translate a Work; 36 | 37 | ii. moral rights retained by the original author(s) and/or performer(s); 38 | 39 | iii. publicity and privacy rights pertaining to a person's image or likeness 40 | depicted in a Work; 41 | 42 | iv. rights protecting against unfair competition in regards to a Work, 43 | subject to the limitations in paragraph 4(a), below; 44 | 45 | v. rights protecting the extraction, dissemination, use and reuse of data in 46 | a Work; 47 | 48 | vi. database rights (such as those arising under Directive 96/9/EC of the 49 | European Parliament and of the Council of 11 March 1996 on the legal 50 | protection of databases, and under any national implementation thereof, 51 | including any amended or successor version of such directive); and 52 | 53 | vii. other similar, equivalent or corresponding rights throughout the world 54 | based on applicable law or treaty, and any national implementations thereof. 55 | 56 | 2. Waiver. 
To the greatest extent permitted by, but not in contravention of, 57 | applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and 58 | unconditionally waives, abandons, and surrenders all of Affirmer's Copyright 59 | and Related Rights and associated claims and causes of action, whether now 60 | known or unknown (including existing as well as future claims and causes of 61 | action), in the Work (i) in all territories worldwide, (ii) for the maximum 62 | duration provided by applicable law or treaty (including future time 63 | extensions), (iii) in any current or future medium and for any number of 64 | copies, and (iv) for any purpose whatsoever, including without limitation 65 | commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes 66 | the Waiver for the benefit of each member of the public at large and to the 67 | detriment of Affirmer's heirs and successors, fully intending that such Waiver 68 | shall not be subject to revocation, rescission, cancellation, termination, or 69 | any other legal or equitable action to disrupt the quiet enjoyment of the Work 70 | by the public as contemplated by Affirmer's express Statement of Purpose. 71 | 72 | 3. Public License Fallback. Should any part of the Waiver for any reason be 73 | judged legally invalid or ineffective under applicable law, then the Waiver 74 | shall be preserved to the maximum extent permitted taking into account 75 | Affirmer's express Statement of Purpose. In addition, to the extent the Waiver 76 | is so judged Affirmer hereby grants to each affected person a royalty-free, 77 | non transferable, non sublicensable, non exclusive, irrevocable and 78 | unconditional license to exercise Affirmer's Copyright and Related Rights in 79 | the Work (i) in all territories worldwide, (ii) for the maximum duration 80 | provided by applicable law or treaty (including future time extensions), (iii) 81 | in any current or future medium and for any number of copies, and (iv) for any 82 | purpose whatsoever, including without limitation commercial, advertising or 83 | promotional purposes (the "License"). The License shall be deemed effective as 84 | of the date CC0 was applied by Affirmer to the Work. Should any part of the 85 | License for any reason be judged legally invalid or ineffective under 86 | applicable law, such partial invalidity or ineffectiveness shall not 87 | invalidate the remainder of the License, and in such case Affirmer hereby 88 | affirms that he or she will not (i) exercise any of his or her remaining 89 | Copyright and Related Rights in the Work or (ii) assert any associated claims 90 | and causes of action with respect to the Work, in either case contrary to 91 | Affirmer's express Statement of Purpose. 92 | 93 | 4. Limitations and Disclaimers. 94 | 95 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 96 | surrendered, licensed or otherwise affected by this document. 97 | 98 | b. Affirmer offers the Work as-is and makes no representations or warranties 99 | of any kind concerning the Work, express, implied, statutory or otherwise, 100 | including without limitation warranties of title, merchantability, fitness 101 | for a particular purpose, non infringement, or the absence of latent or 102 | other defects, accuracy, or the present or absence of errors, whether or not 103 | discoverable, all to the greatest extent permissible under applicable law. 104 | 105 | c. 
Affirmer disclaims responsibility for clearing rights of other persons 106 | that may apply to the Work or any use thereof, including without limitation 107 | any person's Copyright and Related Rights in the Work. Further, Affirmer 108 | disclaims responsibility for obtaining any necessary consents, permissions 109 | or other rights required for any use of the Work. 110 | 111 | d. Affirmer understands and acknowledges that Creative Commons is not a 112 | party to this document and has no duty or obligation with respect to this 113 | CC0 or use of the Work. 114 | 115 | For more information, please see 116 | 117 | 118 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Introduction to Camera Trap Data Management and Analysis in R 2 | 3 | An introduction to analysing standardised camera trap data from the [Wildlife Coexistence Lab, UBC](https://wildlife.forestry.ubc.ca/) and the [WildCAM network](https://wildcams.ca/). 4 | 5 | For the current document deployment see https://wildcolab.github.io/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/index.html 6 | -------------------------------------------------------------------------------- /WildCo_Data_Analysis.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | BuildType: Website 16 | -------------------------------------------------------------------------------- /_bookdown.yml: -------------------------------------------------------------------------------- 1 | book_filename: "WildCo: An Introduction to Camera Trap Data Management and Analysis in R" 2 | language: 3 | ui: 4 | chapter_name: "Chapter " 5 | delete_merged_file: true 6 | output: 7 | bookdown::gitbook: 8 | theme: cosmo 9 | highlight: tango 10 | output_format: html 11 | output_dir: "docs" 12 | -------------------------------------------------------------------------------- /_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -ev 4 | 5 | Rscript -e "bookdown::render_book('index.Rmd', 'bookdown::gitbook')" 6 | Rscript -e "bookdown::render_book('index.Rmd', 'bookdown::pdf_book')" 7 | Rscript -e "bookdown::render_book('index.Rmd', 'bookdown::epub_book')" 8 | 9 | -------------------------------------------------------------------------------- /_deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | [ -z "${GITHUB_PAT}" ] && exit 0 6 | [ "${TRAVIS_BRANCH}" != "master" ] && exit 0 7 | 8 | git config --global user.email "xie@yihui.name" 9 | git config --global user.name "Yihui Xie" 10 | 11 | git clone -b gh-pages https://${GITHUB_PAT}@github.com/${TRAVIS_REPO_SLUG}.git book-output 12 | cd book-output 13 | cp -r ../_book/* ./ 14 | git add --all * 15 | git commit -m"Update the book" || true 16 | git push -q origin gh-pages 17 | -------------------------------------------------------------------------------- /_output.yml: -------------------------------------------------------------------------------- 1 | bookdown::gitbook: 2 | css: style.css 3 | config: 4 | toc: 5 | before: | 6 |
WildCo Data Analysis Intro 7 | after: | 8 |
Published with bookdown
  • 9 | edit: https://github.com/rstudio/bookdown-demo/edit/master/%s 10 | download: ["pdf", "epub"] 11 | bookdown::pdf_book: 12 | includes: 13 | in_header: preamble.tex 14 | latex_engine: xelatex 15 | citation_package: natbib 16 | keep_tex: yes 17 | bookdown::epub_book: default 18 | -------------------------------------------------------------------------------- /book.bib: -------------------------------------------------------------------------------- 1 | @Book{xie2015, 2 | title = {Dynamic Documents with {R} and knitr}, 3 | author = {Yihui Xie}, 4 | publisher = {Chapman and Hall/CRC}, 5 | address = {Boca Raton, Florida}, 6 | year = {2015}, 7 | edition = {2nd}, 8 | note = {ISBN 978-1498716963}, 9 | url = {http://yihui.name/knitr/}, 10 | } 11 | -------------------------------------------------------------------------------- /data/raw_data/AlgarRestorationProject_raw_species_list.csv: -------------------------------------------------------------------------------- 1 | "","class","order","family","genus","species","sp","common_name" 2 | "1","Aves","Galliformes","Phasianidae","Tympanuchus","phasianellus","Tympanuchus.phasianellus","sharp-tailed grouse" 3 | "2","Aves","Gruiformes","Gruidae","Grus","canadensis","Grus.canadensis","sandhill crane" 4 | "3","Aves","Passeriformes","Corvidae","Corvus","corax","Corvus.corax","common raven" 5 | "4","Aves","Passeriformes","Corvidae","Perisoreus","canadensis","Perisoreus.canadensis","canada jay" 6 | "5","Aves","Strigiformes","Strigidae","Strix","nebulosa","Strix.nebulosa","great grey owl" 7 | "6","Mammalia","Artiodactyla","Cervidae","Alces","alces","Alces.alces","moose" 8 | "7","Mammalia","Artiodactyla","Cervidae","Cervus","canadensis","Cervus.canadensis","elk" 9 | "8","Mammalia","Artiodactyla","Cervidae","Odocoileus","virginianus","Odocoileus.virginianus","white-tailed deer" 10 | "9","Mammalia","Artiodactyla","Cervidae","Rangifer","tarandus","Rangifer.tarandus","caribou" 11 | "10","Mammalia","Carnivora","Canidae","Canis","latrans","Canis.latrans","coyote" 12 | "11","Mammalia","Carnivora","Canidae","Canis","lupus","Canis.lupus","gray wolf" 13 | "12","Mammalia","Carnivora","Canidae","Vulpes","vulpes","Vulpes.vulpes","red fox" 14 | "13","Mammalia","Carnivora","Felidae","Lynx","canadensis","Lynx.canadensis","canada lynx" 15 | "14","Mammalia","Carnivora","Mustelidae","Lontra","canadensis","Lontra.canadensis","river otter" 16 | "15","Mammalia","Carnivora","Mustelidae","Martes","americana","Martes.americana","american marten" 17 | "16","Mammalia","Carnivora","Ursidae","Ursus","americanus","Ursus.americanus","black bear" 18 | "17","Mammalia","Lagomorpha","Leporidae","Lepus","americanus","Lepus.americanus","snowshoe hare" 19 | "18","Mammalia","Lagomorpha","Leporidae","Oryctolagus","cuniculus","Oryctolagus.cuniculus","rabbit" 20 | "19","Mammalia","Primates","Hominidae","Homo","sapiens","Homo.sapiens","human" 21 | "20","Mammalia","Rodentia","Sciuridae","Tamiasciurus","hudsonicus","Tamiasciurus.hudsonicus","red squirrel" 22 | "21",NA,NA,NA,"","blank",".blank",NA 23 | "22",NA,NA,NA,"","spp.",".spp.",NA 24 | "23",NA,NA,NA,"Canachites","canadensis","Canachites.canadensis","spruce grouse" 25 | "24",NA,NA,NA,"Unknown","unknown","Unknown.unknown",NA 26 | "25",NA,NA,NA,"Weasel","spp.","Weasel.spp.",NA 27 | -------------------------------------------------------------------------------- /data/raw_data/Los_Amigos_Camera_Trapping_raw_species_list.csv: -------------------------------------------------------------------------------- 1 | 
"","class","order","family","genus","species","common_name","sp" 2 | "1879","Aves","","","","","Bird","." 3 | "412","Aves","Galliformes","Cracidae","Mitu","tuberosum","Razor-billed Curassow","Mitu.tuberosum" 4 | "362","Aves","Galliformes","Cracidae","Penelope","jacquacu","Spix's Guan","Penelope.jacquacu" 5 | "549","Aves","Galliformes","Cracidae","Pipile","cumanensis","Blue-throated Piping-Guan","Pipile.cumanensis" 6 | "1","Aves","Gruiformes","Psophiidae","Psophia","leucoptera","White-winged Trumpeter","Psophia.leucoptera" 7 | "736","Mammalia","Carnivora","Canidae","Atelocynus","microtis","Short-eared Dog","Atelocynus.microtis" 8 | "24674","Mammalia","Carnivora","Canidae","Speothos","venaticus","Bush Dog","Speothos.venaticus" 9 | "13472","Mammalia","Carnivora","Felidae","Herpailurus","yagouaroundi","Jaguarundi","Herpailurus.yagouaroundi" 10 | "518","Mammalia","Carnivora","Felidae","Leopardus","pardalis","Ocelot","Leopardus.pardalis" 11 | "16828","Mammalia","Carnivora","Felidae","Leopardus","wiedii","Margay","Leopardus.wiedii" 12 | "1258","Mammalia","Carnivora","Felidae","Panthera","onca","Jaguar","Panthera.onca" 13 | "483","Mammalia","Carnivora","Felidae","Puma","concolor","Puma","Puma.concolor" 14 | "765","Mammalia","Carnivora","Mustelidae","Eira","barbara","Tayra","Eira.barbara" 15 | "93129","Mammalia","Carnivora","Mustelidae","Galictis","vittata","Greater Grison","Galictis.vittata" 16 | "121663","Mammalia","Carnivora","Mustelidae","Pteronura","brasiliensis","Giant Otter","Pteronura.brasiliensis" 17 | "613","Mammalia","Carnivora","Procyonidae","Nasua","nasua","South American Coati","Nasua.nasua" 18 | "24","Mammalia","Cetartiodactyla","Cervidae","Mazama","americana","Red Brocket","Mazama.americana" 19 | "17","Mammalia","Cetartiodactyla","Cervidae","Mazama","nemorivaga","Amazonian Brown Brocket","Mazama.nemorivaga" 20 | "3","Mammalia","Cetartiodactyla","Tayassuidae","Pecari","tajacu","Collared Peccary","Pecari.tajacu" 21 | "120941","Mammalia","Cetartiodactyla","Tayassuidae","Tayassu","pecari","White-lipped Peccary","Tayassu.pecari" 22 | "1966","Mammalia","Cingulata","Chlamyphoridae","Priodontes","maximus","Giant Armadillo","Priodontes.maximus" 23 | "1397","Mammalia","Cingulata","Dasypodidae","Dasypus","","Dasypus Species","Dasypus." 24 | "494","Mammalia","Didelphimorphia","Didelphidae","Didelphis","marsupialis","Common Opossum","Didelphis.marsupialis" 25 | "105","Mammalia","Perissodactyla","Tapiridae","Tapirus","terrestris","Lowland Tapir","Tapirus.terrestris" 26 | "457","Mammalia","Pilosa","Myrmecophagidae","Myrmecophaga","tridactyla","Giant Anteater","Myrmecophaga.tridactyla" 27 | "2209","Mammalia","Pilosa","Myrmecophagidae","Tamandua","tetradactyla","Southern Tamandua","Tamandua.tetradactyla" 28 | "3055","Mammalia","Primates","Hominidae","Homo","sapiens","Human-Camera Trapper","Homo.sapiens" 29 | "740","Mammalia","Rodentia","","","","Rodent","." 
30 | "465","Mammalia","Rodentia","Cuniculidae","Cuniculus","paca","Spotted Paca","Cuniculus.paca" 31 | "2","Mammalia","Rodentia","Dasyproctidae","Dasyprocta","variegata","Brown Agouti","Dasyprocta.variegata" 32 | "1368","Mammalia","Rodentia","Dinomyidae","Dinomys","branickii","Pacarana","Dinomys.branickii" 33 | "1679","No CV Result","No CV Result","No CV Result","No CV Result","No CV Result","No CV Result","No CV Result.No CV Result" 34 | -------------------------------------------------------------------------------- /data/raw_data/example_covariates/LCC_codes.csv: -------------------------------------------------------------------------------- 1 | discrete_classification,color,description,hab_code 2 | 0,282828,Unknown. No or not enough satellite data available.,unknown 3 | 20,FFBB22,Shrubs. Woody perennial plants with persistent and woody stems and without any defined main stem being less than 5 m tall. The shrub foliage can be either evergreen or deciduous.,shrubs 4 | 30,FFFF4C,Herbaceous vegetation. Plants without persistent stem or shoots above ground and lacking definite firm structure. Tree and shrub cover is less than 10 %.,herbs 5 | 40,F096FF,"Cultivated and managed vegetation / agriculture. Lands covered with temporary crops followed by harvest and a bare soil period (e.g., single and multiple cropping systems). Note that perennial woody crops will be classified as the appropriate forest or shrub land cover type.",cultivated 6 | 50,FA0000,Urban / built up. Land covered by buildings and other man-made structures.,urban 7 | 60,B4B4B4,"Bare / sparse vegetation. Lands with exposed soil, sand, or rocks and never has more than 10 % vegetated cover during any time of the year.",bare 8 | 70,F0F0F0,Snow and ice. Lands under snow or ice cover throughout the year.,snow 9 | 80,0032C8,"Permanent water bodies. Lakes, reservoirs, and rivers. Can be either fresh or salt-water bodies.",water 10 | 90,0096A0,"Herbaceous wetland. Lands with a permanent mixture of water and herbaceous or woody vegetation. The vegetation can be present in either salt, brackish, or fresh water.",herbaceous_wetland 11 | 100,FAE6A0,Moss and lichen.,moss_and_lichen 12 | 111,58481F,"Closed forest, evergreen needle leaf. Tree canopy >70 %, almost all needle leaf trees remain green all year. Canopy is never without green foliage.",closed_forest_evergreen_needle 13 | 112,9900,"Closed forest, evergreen broad leaf. Tree canopy >70 %, almost all broadleaf trees remain green year round. Canopy is never without green foliage.",closed_forest_evergreen_broad 14 | 113,70663E,"Closed forest, deciduous needle leaf. Tree canopy >70 %, consists of seasonal needle leaf tree communities with an annual cycle of leaf-on and leaf-off periods.",closed_forest_deciduous_needle 15 | 114,00CC00,"Closed forest, deciduous broad leaf. Tree canopy >70 %, consists of seasonal broadleaf tree communities with an annual cycle of leaf-on and leaf-off periods.",closed_forest_deciduous_broad 16 | 115,4E751F,"Closed forest, mixed.",mixed_forest 17 | 116,7800,"Closed forest, not matching any of the other definitions.",closed_forest_other 18 | 121,666000,"Open forest, evergreen needle leaf. Top layer- trees 15-70 % and second layer- mixed of shrubs and grassland, almost all needle leaf trees remain green all year. Canopy is never without green foliage.",open_forest_evergreen_needle 19 | 122,8DB400,"Open forest, evergreen broad leaf. Top layer- trees 15-70 % and second layer- mixed of shrubs and grassland, almost all broadleaf trees remain green year round. 
Canopy is never without green foliage.",open_forest_evergreen_broad 20 | 123,8D7400,"Open forest, deciduous needle leaf. Top layer- trees 15-70 % and second layer- mixed of shrubs and grassland, consists of seasonal needle leaf tree communities with an annual cycle of leaf-on and leaf-off periods.",closed_forest_deciduous_needle 21 | 124,A0DC00,"Open forest, deciduous broad leaf. Top layer- trees 15-70 % and second layer- mixed of shrubs and grassland, consists of seasonal broadleaf tree communities with an annual cycle of leaf-on and leaf-off periods.",closed_forest_deciduous_broad 22 | 125,929900,"Open forest, mixed.",open_forest 23 | 126,648C00,"Open forest, not matching any of the other definitions.",open_forest_other 24 | 200,80,"Oceans, seas. Can be either fresh or salt-water bodies.",ocean 25 | -------------------------------------------------------------------------------- /data/raw_data/example_covariates/example_dataframe.csv: -------------------------------------------------------------------------------- 1 | placename,line_of_sight_m 2 | ALG001,137.125 3 | ALG002,131.5277778 4 | ALG003,353.6583333 5 | ALG004,158.0416667 6 | ALG005,305.8194444 7 | ALG006,60.125 8 | ALG007,310.5833333 9 | ALG008,112.75 10 | ALG009,299.0277778 11 | ALG010,102.3055556 12 | ALG011,223.5694444 13 | ALG012,140.9166667 14 | ALG013,394.5694444 15 | ALG014,196.875 16 | ALG015,163.1111111 17 | ALG016,116.1111111 18 | ALG017,138.1944444 19 | ALG018,304.2916667 20 | ALG019,330.9722222 21 | ALG020,204.4027778 22 | ALG021,264.9444444 23 | ALG022,229.1388889 24 | ALG023,218.2916667 25 | ALG024,425.4305556 26 | ALG025,56.97222222 27 | ALG026,200.0555556 28 | ALG027,252.9583333 29 | ALG028,277.5 30 | ALG029,206.5277778 31 | ALG030,43.38888889 32 | ALG031,334.2777778 33 | ALG032,83 34 | ALG033,165 35 | ALG034,337.7916667 36 | ALG035,439.6111111 37 | ALG036,62.69444444 38 | ALG037,392.6111111 39 | ALG038,352.75 40 | ALG039,339.7638889 41 | ALG040,182.75 42 | ALG041,109.25 43 | ALG042,219.5694444 44 | ALG043,62.41666667 45 | ALG044,374.2638889 46 | ALG045,294.8333333 47 | ALG046,34.5 48 | ALG047,363.8055556 49 | ALG048,392.9305556 50 | ALG049,80.77777778 51 | ALG050,139.3611111 52 | ALG051,208.125 53 | ALG052,13.95833333 54 | ALG053,99.30555556 55 | ALG054,35.47222222 56 | ALG055,189.0694444 57 | ALG056,55.41666667 58 | ALG057,93.04166667 59 | ALG058,80.91666667 60 | ALG059,41 61 | ALG060,189.9027778 62 | ALG061,11.16666667 63 | ALG062,16 64 | ALG063,28.94444444 65 | ALG064,34.27777778 66 | ALG065,20.11111111 67 | ALG066,36.94444444 68 | ALG067,188.8333333 69 | ALG068,72.27777778 70 | ALG069,22.85555556 71 | ALG070,28.61111111 72 | ALG071,20.05555556 73 | ALG072,52.44444444 74 | ALG073,55.94444444 75 | -------------------------------------------------------------------------------- /data/raw_data/example_covariates/example_raster.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/data/raw_data/example_covariates/example_raster.tif -------------------------------------------------------------------------------- /data/raw_data/example_covariates/example_speed.csv: -------------------------------------------------------------------------------- 1 | "common_name","mean","sd" 2 | "Tapeti",0.243070234113712,0.0726648840022061 3 | "Grey Tinamou",1.06359671109823,0.792155395118197 4 | "Southern Tamandua",1.27781632653061,0.791154846571545 5 | 
"Razor-billed Curassow",1.29453040843011,0.996600629724198 6 | "White-winged Trumpeter",1.62936744228814,1.50643465055294 7 | "Capybara",1.8,NA 8 | "Margay",1.92243109243697,1.30368771636144 9 | "Brown Agouti",1.96079561870599,1.50770358853429 10 | "South American Coati",1.97115043072869,1.96362079082695 11 | "Spotted Paca",2.05460554870376,1.71457827791246 12 | "Red Brocket",2.06652077073243,1.4686092675924 13 | "Amazonian Brown Brocket",2.10625112190114,1.62831548371387 14 | "Giant Anteater",2.12748038961039,1.0191554171987 15 | "Common Opossum",2.12934850229362,1.87547553962688 16 | "Collared Peccary",2.14391198752141,1.68789069657959 17 | "Great Tinamou",2.205,0.878226622233692 18 | "Dasypus Species",2.51129655561829,1.90750541908221 19 | "Ocelot",2.78014274008886,1.74223277652315 20 | "Puma",2.90349165770815,1.71711752863908 21 | "Lowland Tapir",2.92103098130064,2.41966519843762 22 | "Green Acouchi",3,NA 23 | "Tayra",3.03463903560554,2.9070681287131 24 | "Giant Armadillo",3.49369714285714,2.06711679633674 25 | "Jaguar",3.51636686507937,1.6400770780589 26 | "Short-eared Dog",3.54373053020499,2.11935942609202 27 | "White-lipped Peccary",4.4568,3.77764726781101 28 | "Jaguarundi",4.56714,3.39360424858291 29 | "Greater Grison",4.734,3.64018570954835 30 | "Crab-eating Raccoon",4.75875,1.30727129931013 31 | "Giant Otter",9.72,NA 32 | -------------------------------------------------------------------------------- /data/raw_data/example_data/common_names.csv: -------------------------------------------------------------------------------- 1 | common_name,sp 2 | sharp-tailed grouse,Tympanuchus.phasianellus 3 | sandhill crane,Grus.canadensis 4 | common raven,Corvus.corax 5 | canada jay,Perisoreus.canadensis 6 | great grey owl,Strix.nebulosa 7 | moose,Alces.alces 8 | elk,Cervus.canadensis 9 | white-tailed deer,Odocoileus.virginianus 10 | caribou,Rangifer.tarandus 11 | coyote,Canis.latrans 12 | gray wolf,Canis.lupus 13 | red fox,Vulpes.vulpes 14 | canada lynx,Lynx.canadensis 15 | river otter,Lontra.canadensis 16 | american marten,Martes.americana 17 | black bear,Ursus.americanus 18 | snowshoe hare,Lepus.americanus 19 | rabbit,Oryctolagus.cuniculus 20 | human,Homo.sapiens 21 | red squirrel,Tamiasciurus.hudsonicus 22 | NA,blank 23 | NA,spp. 24 | spruce grouse,Canachites.canadensis 25 | NA,Unknown.unknown 26 | NA,Weasel.spp. 
27 | -------------------------------------------------------------------------------- /data/raw_data/example_data/projects.csv: -------------------------------------------------------------------------------- 1 | "project_id","project_name","project_short_name","project_objectives","project_species","project_species_individual","project_sensor_layout","project_sensor_layout_targeted_type","project_bait_use","project_bait_type","project_stratification","project_stratification_type","project_sensor_method","project_individual_animals","project_blank_images","project_sensor_cluster","project_admin","project_admin_email","project_admin_organization","country_code","embargo","initiative_id","metadata_license","image_license","data_citation","count_optional","project_type" 2 | "AlgarRestorationProject","AlgarRestorationProject","Algar","Investigate medium-large bodied mammal habitat use in response to human recreation spatially and temporally","","","Stratified","Seismic lines","No","","Seismic line restoration treatements and controls","Offline, HumanUse","Sensor.Detection","","yes","","Cole Burton","cole.burton@ubc.ca","University of British Columbia","","","","","","Beirne, Christopher, Catherine Sun, Erin R. Tattersall, Joanna M. Burgar, Jason T. Fisher, and A. Cole Burton. Multispecies modelling reveals potential for habitat restoration to re‐establish boreal vertebrate community dynamics. Journal of Applied Ecology 58, no. 12 (2021): 2821-2832.","no","image" 3 | -------------------------------------------------------------------------------- /data/raw_data/your_data/projects.csv: -------------------------------------------------------------------------------- 1 | "project_id","project_name","project_short_name","project_objectives","project_species","project_species_individual","project_sensor_layout","project_sensor_layout_targeted_type","project_bait_use","project_bait_type","project_stratification","project_stratification_type","project_sensor_method","project_individual_animals","project_blank_images","project_sensor_cluster","project_admin","project_admin_email","project_admin_organization","country_code","embargo","initiative_id","metadata_license","image_license","data_citation","count_optional","project_type" 2 | 2003269,"Los_Amigos_Camera_Trapping","Los Amigos","Census medium-to-large vertebrate community ","Multiple",NA,"Systematic",NA,"Some","None","Yes",NA,"Both","No","No","Yes","Christopher Beirne","c.w.beirne@gmail.com","Osa_Conservation","PER",48,NA,"CC-BY","CC-BY-NC","Andrew Whitworth; Arianna Bastoe; Eleanor Flatt; Christopher Beirne; Flor Maria Perez Mullisaca; Rosio Vega. 2021. Last updated November 2024. Los_Amigos_Camera_Trapping. http://n2t.net/ark:/63614/w12003269. 
Accessed via wildlifeinsights.org on 2025-01-03.","false","Image" 3 | -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_17-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_17-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_18-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_18-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_20-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_20-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_22-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_22-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_24-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_24-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_29-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_29-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_30-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_30-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_33-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_33-1.png 
-------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_54-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_54-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_59-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_59-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch4_60-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch4_60-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_14-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_14-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_15-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_15-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_15-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_15-2.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_15-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_15-3.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_15-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_15-4.png -------------------------------------------------------------------------------- 
/docs/Intro-to-data-analysis_files/figure-html/ch5_17-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_17-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_21-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_21-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_22-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_22-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_23-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_23-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_25-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_25-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch5_26-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch5_26-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_14-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_14-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_16-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_16-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_19-1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_19-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_21-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_21-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_21b-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_21b-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_22-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_22-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_23-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_23-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch6_25-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch6_25-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_12-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_12-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_13-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_13-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_16-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_16-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_22-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_22-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_28-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_28-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_29-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_29-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_40-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_40-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_41-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_41-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_5-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_5-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch7_9-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch7_9-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch8_20-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch8_20-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch9_10-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch9_10-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch9_11-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch9_11-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch9_15-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch9_15-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch9_7-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch9_7-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/ch9_8-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/ch9_8-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-19-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-19-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-20-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-20-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-22-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-22-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-23-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-23-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-25-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-25-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-26-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-26-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-29-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-29-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-3-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-3-1.png -------------------------------------------------------------------------------- /docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-6-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/Intro-to-data-analysis_files/figure-html/unnamed-chunk-6-1.png -------------------------------------------------------------------------------- /docs/images/Ninja.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/Ninja.png -------------------------------------------------------------------------------- /docs/images/analysis_covariates/rgee_check.PNG: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/analysis_covariates/rgee_check.PNG -------------------------------------------------------------------------------- /docs/images/community_metrics/AsyEst.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/community_metrics/AsyEst.PNG -------------------------------------------------------------------------------- /docs/images/community_metrics/coverage_based.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/community_metrics/coverage_based.PNG -------------------------------------------------------------------------------- /docs/images/community_metrics/data_format.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/community_metrics/data_format.PNG -------------------------------------------------------------------------------- /docs/images/community_metrics/data_info.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/community_metrics/data_info.PNG -------------------------------------------------------------------------------- /docs/images/community_metrics/size_based.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/community_metrics/size_based.PNG -------------------------------------------------------------------------------- /docs/images/community_metrics/unit_based.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/community_metrics/unit_based.png -------------------------------------------------------------------------------- /docs/images/course-preparation/New_directory.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/New_directory.png -------------------------------------------------------------------------------- /docs/images/course-preparation/R_studio_start.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/R_studio_start.PNG -------------------------------------------------------------------------------- /docs/images/course-preparation/click_download.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/click_download.png -------------------------------------------------------------------------------- /docs/images/course-preparation/copy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/copy.png -------------------------------------------------------------------------------- /docs/images/course-preparation/file_window.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/file_window.PNG -------------------------------------------------------------------------------- /docs/images/course-preparation/final_files.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/final_files.PNG -------------------------------------------------------------------------------- /docs/images/course-preparation/new_project.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/new_project.png -------------------------------------------------------------------------------- /docs/images/course-preparation/new_project_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/new_project_2.png -------------------------------------------------------------------------------- /docs/images/course-preparation/paste_files.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/paste_files.PNG -------------------------------------------------------------------------------- /docs/images/course-preparation/project_name.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/course-preparation/project_name.png -------------------------------------------------------------------------------- /docs/images/data_creation/cap_matrix.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/data_creation/cap_matrix.PNG 
-------------------------------------------------------------------------------- /docs/images/data_creation/effort_lookup.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/data_creation/effort_lookup.PNG -------------------------------------------------------------------------------- /docs/images/data_creation/ind_dat.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/data_creation/ind_dat.PNG -------------------------------------------------------------------------------- /docs/images/exploration/project_your_locations.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/exploration/project_your_locations.PNG -------------------------------------------------------------------------------- /docs/images/preprocessing/folder_structure.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/preprocessing/folder_structure.PNG -------------------------------------------------------------------------------- /docs/images/preprocessing/function.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/preprocessing/function.png -------------------------------------------------------------------------------- /docs/images/preprocessing/human_blur.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/preprocessing/human_blur.jpg -------------------------------------------------------------------------------- /docs/images/preprocessing/labelers.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/preprocessing/labelers.PNG -------------------------------------------------------------------------------- /docs/images/preprocessing/renamer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/images/preprocessing/renamer.png -------------------------------------------------------------------------------- /docs/libs/Proj4Leaflet-1.0.1/proj4leaflet.js: -------------------------------------------------------------------------------- 1 | (function (factory) { 2 | var L, proj4; 3 | if (typeof define === 'function' && define.amd) { 4 | // AMD 5 | define(['leaflet', 'proj4'], factory); 6 | } else if (typeof module 
=== 'object' && typeof module.exports === "object") { 7 | // Node/CommonJS 8 | L = require('leaflet'); 9 | proj4 = require('proj4'); 10 | module.exports = factory(L, proj4); 11 | } else { 12 | // Browser globals 13 | if (typeof window.L === 'undefined' || typeof window.proj4 === 'undefined') 14 | throw 'Leaflet and proj4 must be loaded first'; 15 | factory(window.L, window.proj4); 16 | } 17 | }(function (L, proj4) { 18 | if (proj4.__esModule && proj4.default) { 19 | // If proj4 was bundled as an ES6 module, unwrap it to get 20 | // to the actual main proj4 object. 21 | // See discussion in https://github.com/kartena/Proj4Leaflet/pull/147 22 | proj4 = proj4.default; 23 | } 24 | 25 | L.Proj = {}; 26 | 27 | L.Proj._isProj4Obj = function(a) { 28 | return (typeof a.inverse !== 'undefined' && 29 | typeof a.forward !== 'undefined'); 30 | }; 31 | 32 | L.Proj.Projection = L.Class.extend({ 33 | initialize: function(code, def, bounds) { 34 | var isP4 = L.Proj._isProj4Obj(code); 35 | this._proj = isP4 ? code : this._projFromCodeDef(code, def); 36 | this.bounds = isP4 ? def : bounds; 37 | }, 38 | 39 | project: function (latlng) { 40 | var point = this._proj.forward([latlng.lng, latlng.lat]); 41 | return new L.Point(point[0], point[1]); 42 | }, 43 | 44 | unproject: function (point, unbounded) { 45 | var point2 = this._proj.inverse([point.x, point.y]); 46 | return new L.LatLng(point2[1], point2[0], unbounded); 47 | }, 48 | 49 | _projFromCodeDef: function(code, def) { 50 | if (def) { 51 | proj4.defs(code, def); 52 | } else if (proj4.defs[code] === undefined) { 53 | var urn = code.split(':'); 54 | if (urn.length > 3) { 55 | code = urn[urn.length - 3] + ':' + urn[urn.length - 1]; 56 | } 57 | if (proj4.defs[code] === undefined) { 58 | throw 'No projection definition for code ' + code; 59 | } 60 | } 61 | 62 | return proj4(code); 63 | } 64 | }); 65 | 66 | L.Proj.CRS = L.Class.extend({ 67 | includes: L.CRS, 68 | 69 | options: { 70 | transformation: new L.Transformation(1, 0, -1, 0) 71 | }, 72 | 73 | initialize: function(a, b, c) { 74 | var code, 75 | proj, 76 | def, 77 | options; 78 | 79 | if (L.Proj._isProj4Obj(a)) { 80 | proj = a; 81 | code = proj.srsCode; 82 | options = b || {}; 83 | 84 | this.projection = new L.Proj.Projection(proj, options.bounds); 85 | } else { 86 | code = a; 87 | def = b; 88 | options = c || {}; 89 | this.projection = new L.Proj.Projection(code, def, options.bounds); 90 | } 91 | 92 | L.Util.setOptions(this, options); 93 | this.code = code; 94 | this.transformation = this.options.transformation; 95 | 96 | if (this.options.origin) { 97 | this.transformation = 98 | new L.Transformation(1, -this.options.origin[0], 99 | -1, this.options.origin[1]); 100 | } 101 | 102 | if (this.options.scales) { 103 | this._scales = this.options.scales; 104 | } else if (this.options.resolutions) { 105 | this._scales = []; 106 | for (var i = this.options.resolutions.length - 1; i >= 0; i--) { 107 | if (this.options.resolutions[i]) { 108 | this._scales[i] = 1 / this.options.resolutions[i]; 109 | } 110 | } 111 | } 112 | 113 | this.infinite = !this.options.bounds; 114 | 115 | }, 116 | 117 | scale: function(zoom) { 118 | var iZoom = Math.floor(zoom), 119 | baseScale, 120 | nextScale, 121 | scaleDiff, 122 | zDiff; 123 | if (zoom === iZoom) { 124 | return this._scales[zoom]; 125 | } else { 126 | // Non-integer zoom, interpolate 127 | baseScale = this._scales[iZoom]; 128 | nextScale = this._scales[iZoom + 1]; 129 | scaleDiff = nextScale - baseScale; 130 | zDiff = (zoom - iZoom); 131 | return baseScale + scaleDiff * 
zDiff; 132 | } 133 | }, 134 | 135 | zoom: function(scale) { 136 | // Find closest number in this._scales, down 137 | var downScale = this._closestElement(this._scales, scale), 138 | downZoom = this._scales.indexOf(downScale), 139 | nextScale, 140 | nextZoom, 141 | scaleDiff; 142 | // Check if scale is downScale => return array index 143 | if (scale === downScale) { 144 | return downZoom; 145 | } 146 | if (downScale === undefined) { 147 | return -Infinity; 148 | } 149 | // Interpolate 150 | nextZoom = downZoom + 1; 151 | nextScale = this._scales[nextZoom]; 152 | if (nextScale === undefined) { 153 | return Infinity; 154 | } 155 | scaleDiff = nextScale - downScale; 156 | return (scale - downScale) / scaleDiff + downZoom; 157 | }, 158 | 159 | distance: L.CRS.Earth.distance, 160 | 161 | R: L.CRS.Earth.R, 162 | 163 | /* Get the closest lowest element in an array */ 164 | _closestElement: function(array, element) { 165 | var low; 166 | for (var i = array.length; i--;) { 167 | if (array[i] <= element && (low === undefined || low < array[i])) { 168 | low = array[i]; 169 | } 170 | } 171 | return low; 172 | } 173 | }); 174 | 175 | L.Proj.GeoJSON = L.GeoJSON.extend({ 176 | initialize: function(geojson, options) { 177 | this._callLevel = 0; 178 | L.GeoJSON.prototype.initialize.call(this, geojson, options); 179 | }, 180 | 181 | addData: function(geojson) { 182 | var crs; 183 | 184 | if (geojson) { 185 | if (geojson.crs && geojson.crs.type === 'name') { 186 | crs = new L.Proj.CRS(geojson.crs.properties.name); 187 | } else if (geojson.crs && geojson.crs.type) { 188 | crs = new L.Proj.CRS(geojson.crs.type + ':' + geojson.crs.properties.code); 189 | } 190 | 191 | if (crs !== undefined) { 192 | this.options.coordsToLatLng = function(coords) { 193 | var point = L.point(coords[0], coords[1]); 194 | return crs.projection.unproject(point); 195 | }; 196 | } 197 | } 198 | 199 | // Base class' addData might call us recursively, but 200 | // CRS shouldn't be cleared in that case, since CRS applies 201 | // to the whole GeoJSON, inluding sub-features. 202 | this._callLevel++; 203 | try { 204 | L.GeoJSON.prototype.addData.call(this, geojson); 205 | } finally { 206 | this._callLevel--; 207 | if (this._callLevel === 0) { 208 | delete this.options.coordsToLatLng; 209 | } 210 | } 211 | } 212 | }); 213 | 214 | L.Proj.geoJson = function(geojson, options) { 215 | return new L.Proj.GeoJSON(geojson, options); 216 | }; 217 | 218 | L.Proj.ImageOverlay = L.ImageOverlay.extend({ 219 | initialize: function (url, bounds, options) { 220 | L.ImageOverlay.prototype.initialize.call(this, url, null, options); 221 | this._projectedBounds = bounds; 222 | }, 223 | 224 | // Danger ahead: Overriding internal methods in Leaflet. 225 | // Decided to do this rather than making a copy of L.ImageOverlay 226 | // and doing very tiny modifications to it. 227 | // Future will tell if this was wise or not. 
228 | _animateZoom: function (event) { 229 | var scale = this._map.getZoomScale(event.zoom); 230 | var northWest = L.point(this._projectedBounds.min.x, this._projectedBounds.max.y); 231 | var offset = this._projectedToNewLayerPoint(northWest, event.zoom, event.center); 232 | 233 | L.DomUtil.setTransform(this._image, offset, scale); 234 | }, 235 | 236 | _reset: function () { 237 | var zoom = this._map.getZoom(); 238 | var pixelOrigin = this._map.getPixelOrigin(); 239 | var bounds = L.bounds( 240 | this._transform(this._projectedBounds.min, zoom)._subtract(pixelOrigin), 241 | this._transform(this._projectedBounds.max, zoom)._subtract(pixelOrigin) 242 | ); 243 | var size = bounds.getSize(); 244 | 245 | L.DomUtil.setPosition(this._image, bounds.min); 246 | this._image.style.width = size.x + 'px'; 247 | this._image.style.height = size.y + 'px'; 248 | }, 249 | 250 | _projectedToNewLayerPoint: function (point, zoom, center) { 251 | var viewHalf = this._map.getSize()._divideBy(2); 252 | var newTopLeft = this._map.project(center, zoom)._subtract(viewHalf)._round(); 253 | var topLeft = newTopLeft.add(this._map._getMapPanePos()); 254 | 255 | return this._transform(point, zoom)._subtract(topLeft); 256 | }, 257 | 258 | _transform: function (point, zoom) { 259 | var crs = this._map.options.crs; 260 | var transformation = crs.transformation; 261 | var scale = crs.scale(zoom); 262 | 263 | return transformation.transform(point, scale); 264 | } 265 | }); 266 | 267 | L.Proj.imageOverlay = function (url, bounds, options) { 268 | return new L.Proj.ImageOverlay(url, bounds, options); 269 | }; 270 | 271 | return L.Proj; 272 | })); 273 | -------------------------------------------------------------------------------- /docs/libs/anchor-sections-1.1.0/anchor-sections-hash.css: -------------------------------------------------------------------------------- 1 | /* Styles for section anchors */ 2 | a.anchor-section::before {content: '#';font-size: 80%;} 3 | -------------------------------------------------------------------------------- /docs/libs/anchor-sections-1.1.0/anchor-sections.css: -------------------------------------------------------------------------------- 1 | /* Styles for section anchors */ 2 | a.anchor-section {margin-left: 10px; visibility: hidden; color: inherit;} 3 | .hasAnchor:hover a.anchor-section {visibility: visible;} 4 | ul > li > .anchor-section {display: none;} 5 | -------------------------------------------------------------------------------- /docs/libs/anchor-sections-1.1.0/anchor-sections.js: -------------------------------------------------------------------------------- 1 | document.addEventListener('DOMContentLoaded', function () { 2 | // If section divs is used, we need to put the anchor in the child header 3 | const headers = document.querySelectorAll("div.hasAnchor.section[class*='level'] > :first-child") 4 | 5 | headers.forEach(function (x) { 6 | // Add to the header node 7 | if (!x.classList.contains('hasAnchor')) x.classList.add('hasAnchor') 8 | // Remove from the section or div created by Pandoc 9 | x.parentElement.classList.remove('hasAnchor') 10 | }) 11 | }) 12 | -------------------------------------------------------------------------------- /docs/libs/bsTable-3.3.7/bootstrapTable.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap v3.3.7 (http://getbootstrap.com) 3 | * Copyright 2011-2018 Twitter, Inc. 4 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 5 | */ 6 | 7 | /*! 
8 | * Generated using the Bootstrap Customizer () 9 | * Config saved to config.json and 10 | *//*! 11 | * Bootstrap v3.3.7 (http://getbootstrap.com) 12 | * Copyright 2011-2016 Twitter, Inc. 13 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 14 | *//*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */table{border-collapse:collapse;border-spacing:0}td,th{padding:0}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}*:before,*:after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-width:100%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>th,.table>caption+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>td,.table>thead:first-child>tr:first-child>td{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>thead>tr>th,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>tbody>tr>td,.table-condensed>tfoot>tr>td{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>tbody>tr>td,.table-bordered>tfoot>tr>td{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>thead>tr>td{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*="col-"]{position:static;float:none;display:table-column}table td[class*="col-"],table 
th[class*="col-"]{position:static;float:none;display:table-cell}.table>thead>tr>td.active,.table>tbody>tr>td.active,.table>tfoot>tr>td.active,.table>thead>tr>th.active,.table>tbody>tr>th.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>tbody>tr.active>td,.table>tfoot>tr.active>td,.table>thead>tr.active>th,.table>tbody>tr.active>th,.table>tfoot>tr.active>th{background-color:#f5f5f5}.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover,.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr.active:hover>th{background-color:#e8e8e8}.table>thead>tr>td.success,.table>tbody>tr>td.success,.table>tfoot>tr>td.success,.table>thead>tr>th.success,.table>tbody>tr>th.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>tbody>tr.success>td,.table>tfoot>tr.success>td,.table>thead>tr.success>th,.table>tbody>tr.success>th,.table>tfoot>tr.success>th{background-color:#dff0d8}.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover,.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr.success:hover>th{background-color:#d0e9c6}.table>thead>tr>td.info,.table>tbody>tr>td.info,.table>tfoot>tr>td.info,.table>thead>tr>th.info,.table>tbody>tr>th.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>tbody>tr.info>td,.table>tfoot>tr.info>td,.table>thead>tr.info>th,.table>tbody>tr.info>th,.table>tfoot>tr.info>th{background-color:#d9edf7}.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover,.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr.info:hover>th{background-color:#c4e3f3}.table>thead>tr>td.warning,.table>tbody>tr>td.warning,.table>tfoot>tr>td.warning,.table>thead>tr>th.warning,.table>tbody>tr>th.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>tbody>tr.warning>td,.table>tfoot>tr.warning>td,.table>thead>tr.warning>th,.table>tbody>tr.warning>th,.table>tfoot>tr.warning>th{background-color:#fcf8e3}.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover,.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr:hover>.warning,.table-hover>tbody>tr.warning:hover>th{background-color:#faf2cc}.table>thead>tr>td.danger,.table>tbody>tr>td.danger,.table>tfoot>tr>td.danger,.table>thead>tr>th.danger,.table>tbody>tr>th.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>tbody>tr.danger>td,.table>tfoot>tr.danger>td,.table>thead>tr.danger>th,.table>tbody>tr.danger>th,.table>tfoot>tr.danger>th{background-color:#f2dede}.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover,.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr.danger:hover>th{background-color:#ebcccc}.table-responsive{overflow-x:auto;min-height:0.01%}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid 
#ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>thead>tr>th,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tfoot>tr>td{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>thead>tr>th:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.table-responsive>.table-bordered>thead>tr>th:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>th,.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:normal;letter-spacing:normal;line-break:auto;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;white-space:normal;word-break:normal;word-spacing:normal;word-wrap:normal;font-size:12px;opacity:0;filter:alpha(opacity=0)}.tooltip.in{opacity:.9;filter:alpha(opacity=90)}.tooltip.top{margin-top:-3px;padding:5px 0}.tooltip.right{margin-left:3px;padding:0 5px}.tooltip.bottom{margin-top:3px;padding:5px 0}.tooltip.left{margin-left:-3px;padding:0 5px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{bottom:0;right:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:normal;letter-spacing:normal;line-break:auto;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;white-space:normal;word-break:normal;word-spacing:normal;word-wrap:normal;font-size:14px;background-color:#fff;-webkit-background-clip:padding-box;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2)}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{margin:0;padding:8px 14px;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{border-width:10px;content:""}.popover.top>.arrow{left:50%;margin-left:-11px;border-bottom-width:0;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);bottom:-11px}.popover.top>.arrow:after{content:" ";bottom:1px;margin-left:-10px;border-bottom-width:0;border-top-color:#fff}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-left-width:0;border-right-color:#999;border-right-color:rgba(0,0,0,0.25)}.popover.right>.arrow:after{content:" ";left:1px;bottom:-10px;border-left-width:0;border-right-color:#fff}.popover.bottom>.arrow{left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);top:-11px}.popover.bottom>.arrow:after{content:" ";top:1px;margin-left:-10px;border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,0.25)}.popover.left>.arrow:after{content:" ";right:1px;border-right-width:0;border-left-color:#fff;bottom:-10px}.clearfix:before,.clearfix:after{content:" ";display:table}.clearfix:after{clear:both}.center-block{display:block;margin-left:auto;margin-right:auto}.pull-right{float:right !important}.pull-left{float:left !important}.hide{display:none !important}.show{display:block !important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none !important}.affix{position:fixed} 15 | -------------------------------------------------------------------------------- /docs/libs/clipboard-0.0.1/setClipboardText.js: -------------------------------------------------------------------------------- 1 | // taken from 2 | // https://ourcodeworld.com/articles/read/143/how-to-copy-text-to-clipboard-with-javascript-easily 3 | function setClipboardText(text) { 4 | var id = 'mycustom-clipboard-textarea-hidden-id'; 5 | var existsTextarea = document.getElementById(id); 6 | 7 | if (!existsTextarea) { 8 | console.log('Creating textarea'); 9 | var textarea = document.createElement('textarea'); 10 | textarea.id = id; 11 | // Place in top-left corner of screen regardless of scroll position. 12 | textarea.style.position = 'fixed'; 13 | textarea.style.top = 0; 14 | textarea.style.left = 0; 15 | 16 | // Ensure it has a small width and height. Setting to 1px / 1em 17 | // doesn't work as this gives a negative w/h on some browsers. 
18 | textarea.style.width = '1px'; 19 | textarea.style.height = '1px'; 20 | 21 | // We don't need padding, reducing the size if it does flash render. 22 | textarea.style.padding = 0; 23 | 24 | // Clean up any borders. 25 | textarea.style.border = 'none'; 26 | textarea.style.outline = 'none'; 27 | textarea.style.boxShadow = 'none'; 28 | 29 | // Avoid flash of white box if rendered for any reason. 30 | textarea.style.background = 'transparent'; 31 | document.querySelector('body').appendChild(textarea); 32 | console.log('The textarea now exists :)'); 33 | existsTextarea = document.getElementById(id); 34 | } else { 35 | console.log('The textarea already exists :3'); 36 | } 37 | 38 | existsTextarea.value = text; 39 | existsTextarea.select(); 40 | 41 | try { 42 | var status = document.execCommand('copy'); 43 | if (!status) { 44 | console.error('Cannot copy text'); 45 | } else { 46 | console.log('The text is now on the clipboard'); 47 | } 48 | } catch (err) { 49 | console.log('Unable to copy.'); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /docs/libs/crosstalk-1.2.0/css/crosstalk.min.css: -------------------------------------------------------------------------------- 1 | .container-fluid.crosstalk-bscols{margin-left:-30px;margin-right:-30px;white-space:normal}body>.container-fluid.crosstalk-bscols{margin-left:auto;margin-right:auto}.crosstalk-input-checkboxgroup .crosstalk-options-group .crosstalk-options-column{display:inline-block;padding-right:12px;vertical-align:top}@media only screen and (max-width: 480px){.crosstalk-input-checkboxgroup .crosstalk-options-group .crosstalk-options-column{display:block;padding-right:inherit}}.crosstalk-input{margin-bottom:15px}.crosstalk-input .control-label{margin-bottom:0;vertical-align:middle}.crosstalk-input input[type="checkbox"]{margin:4px 0 0;margin-top:1px;line-height:normal}.crosstalk-input .checkbox{position:relative;display:block;margin-top:10px;margin-bottom:10px}.crosstalk-input .checkbox>label{padding-left:20px;margin-bottom:0;font-weight:400;cursor:pointer}.crosstalk-input .checkbox input[type="checkbox"],.crosstalk-input .checkbox-inline input[type="checkbox"]{position:absolute;margin-top:2px;margin-left:-20px}.crosstalk-input .checkbox+.checkbox{margin-top:-5px}.crosstalk-input .checkbox-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;font-weight:400;vertical-align:middle;cursor:pointer}.crosstalk-input .checkbox-inline+.checkbox-inline{margin-top:0;margin-left:10px} 2 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/css/fontawesome/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WildCoLab/Introduction-to-Camera-Trap-Data-Management-and-Analysis-in-R/6b4170abbad9f7f7762ef6874e1246b28000f371/docs/libs/gitbook-2.6.7/css/fontawesome/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/css/plugin-bookdown.css: -------------------------------------------------------------------------------- 1 | .book .book-header h1 { 2 | padding-left: 20px; 3 | padding-right: 20px; 4 | } 5 | .book .book-header.fixed { 6 | position: fixed; 7 | right: 0; 8 | top: 0; 9 | left: 0; 10 | border-bottom: 1px solid rgba(0,0,0,.07); 11 | } 12 | span.search-highlight { 13 | background-color: #ffff88; 14 | } 15 | @media (min-width: 600px) { 16 | .book.with-summary 
.book-header.fixed { 17 | left: 300px; 18 | } 19 | } 20 | @media (max-width: 1240px) { 21 | .book .book-body.fixed { 22 | top: 50px; 23 | } 24 | .book .book-body.fixed .body-inner { 25 | top: auto; 26 | } 27 | } 28 | @media (max-width: 600px) { 29 | .book.with-summary .book-header.fixed { 30 | left: calc(100% - 60px); 31 | min-width: 300px; 32 | } 33 | .book.with-summary .book-body { 34 | transform: none; 35 | left: calc(100% - 60px); 36 | min-width: 300px; 37 | } 38 | .book .book-body.fixed { 39 | top: 0; 40 | } 41 | } 42 | 43 | .book .book-body.fixed .body-inner { 44 | top: 50px; 45 | } 46 | .book .book-body .page-wrapper .page-inner section.normal sub, .book .book-body .page-wrapper .page-inner section.normal sup { 47 | font-size: 85%; 48 | } 49 | 50 | @media print { 51 | .book .book-summary, .book .book-body .book-header, .fa { 52 | display: none !important; 53 | } 54 | .book .book-body.fixed { 55 | left: 0px; 56 | } 57 | .book .book-body,.book .book-body .body-inner, .book.with-summary { 58 | overflow: visible !important; 59 | } 60 | } 61 | .kable_wrapper { 62 | border-spacing: 20px 0; 63 | border-collapse: separate; 64 | border: none; 65 | margin: auto; 66 | } 67 | .kable_wrapper > tbody > tr > td { 68 | vertical-align: top; 69 | } 70 | .book .book-body .page-wrapper .page-inner section.normal table tr.header { 71 | border-top-width: 2px; 72 | } 73 | .book .book-body .page-wrapper .page-inner section.normal table tr:last-child td { 74 | border-bottom-width: 2px; 75 | } 76 | .book .book-body .page-wrapper .page-inner section.normal table td, .book .book-body .page-wrapper .page-inner section.normal table th { 77 | border-left: none; 78 | border-right: none; 79 | } 80 | .book .book-body .page-wrapper .page-inner section.normal table.kable_wrapper > tbody > tr, .book .book-body .page-wrapper .page-inner section.normal table.kable_wrapper > tbody > tr > td { 81 | border-top: none; 82 | } 83 | .book .book-body .page-wrapper .page-inner section.normal table.kable_wrapper > tbody > tr:last-child > td { 84 | border-bottom: none; 85 | } 86 | 87 | div.theorem, div.lemma, div.corollary, div.proposition, div.conjecture { 88 | font-style: italic; 89 | } 90 | span.theorem, span.lemma, span.corollary, span.proposition, span.conjecture { 91 | font-style: normal; 92 | } 93 | div.proof>*:last-child:after { 94 | content: "\25a2"; 95 | float: right; 96 | } 97 | .header-section-number { 98 | padding-right: .5em; 99 | } 100 | #header .multi-author { 101 | margin: 0.5em 0 -0.5em 0; 102 | } 103 | #header .date { 104 | margin-top: 1.5em; 105 | } 106 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/css/plugin-clipboard.css: -------------------------------------------------------------------------------- 1 | div.sourceCode { 2 | position: relative; 3 | } 4 | 5 | .copy-to-clipboard-button { 6 | position: absolute; 7 | right: 0; 8 | top: 0; 9 | visibility: hidden; 10 | } 11 | 12 | .copy-to-clipboard-button:focus { 13 | outline: 0; 14 | } 15 | 16 | div.sourceCode:hover > .copy-to-clipboard-button { 17 | visibility: visible; 18 | } 19 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/css/plugin-fontsettings.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Theme 1 3 | */ 4 | .color-theme-1 .dropdown-menu { 5 | background-color: #111111; 6 | border-color: #7e888b; 7 | } 8 | .color-theme-1 .dropdown-menu .dropdown-caret .caret-inner { 9 | border-bottom: 
9px solid #111111; 10 | } 11 | .color-theme-1 .dropdown-menu .buttons { 12 | border-color: #7e888b; 13 | } 14 | .color-theme-1 .dropdown-menu .button { 15 | color: #afa790; 16 | } 17 | .color-theme-1 .dropdown-menu .button:hover { 18 | color: #73553c; 19 | } 20 | /* 21 | * Theme 2 22 | */ 23 | .color-theme-2 .dropdown-menu { 24 | background-color: #2d3143; 25 | border-color: #272a3a; 26 | } 27 | .color-theme-2 .dropdown-menu .dropdown-caret .caret-inner { 28 | border-bottom: 9px solid #2d3143; 29 | } 30 | .color-theme-2 .dropdown-menu .buttons { 31 | border-color: #272a3a; 32 | } 33 | .color-theme-2 .dropdown-menu .button { 34 | color: #62677f; 35 | } 36 | .color-theme-2 .dropdown-menu .button:hover { 37 | color: #f4f4f5; 38 | } 39 | .book .book-header .font-settings .font-enlarge { 40 | line-height: 30px; 41 | font-size: 1.4em; 42 | } 43 | .book .book-header .font-settings .font-reduce { 44 | line-height: 30px; 45 | font-size: 1em; 46 | } 47 | 48 | /* sidebar transition background */ 49 | div.book.color-theme-1 { 50 | background: #f3eacb; 51 | } 52 | .book.color-theme-1 .book-body { 53 | color: #704214; 54 | background: #f3eacb; 55 | } 56 | .book.color-theme-1 .book-body .page-wrapper .page-inner section { 57 | background: #f3eacb; 58 | } 59 | 60 | /* sidebar transition background */ 61 | div.book.color-theme-2 { 62 | background: #1c1f2b; 63 | } 64 | 65 | .book.color-theme-2 .book-body { 66 | color: #bdcadb; 67 | background: #1c1f2b; 68 | } 69 | .book.color-theme-2 .book-body .page-wrapper .page-inner section { 70 | background: #1c1f2b; 71 | } 72 | .book.font-size-0 .book-body .page-inner section { 73 | font-size: 1.2rem; 74 | } 75 | .book.font-size-1 .book-body .page-inner section { 76 | font-size: 1.4rem; 77 | } 78 | .book.font-size-2 .book-body .page-inner section { 79 | font-size: 1.6rem; 80 | } 81 | .book.font-size-3 .book-body .page-inner section { 82 | font-size: 2.2rem; 83 | } 84 | .book.font-size-4 .book-body .page-inner section { 85 | font-size: 4rem; 86 | } 87 | .book.font-family-0 { 88 | font-family: Georgia, serif; 89 | } 90 | .book.font-family-1 { 91 | font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; 92 | } 93 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal { 94 | color: #704214; 95 | } 96 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal a { 97 | color: inherit; 98 | } 99 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h1, 100 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h2, 101 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h3, 102 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h4, 103 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h5, 104 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h6 { 105 | color: inherit; 106 | } 107 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h1, 108 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h2 { 109 | border-color: inherit; 110 | } 111 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal h6 { 112 | color: inherit; 113 | } 114 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal hr { 115 | background-color: inherit; 116 | } 117 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal blockquote { 118 | border-color: #c4b29f; 119 | opacity: 0.9; 120 | } 121 | .book.color-theme-1 .book-body .page-wrapper 
.page-inner section.normal pre, 122 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal code { 123 | background: #fdf6e3; 124 | color: #657b83; 125 | border-color: #f8df9c; 126 | } 127 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal .highlight { 128 | background-color: inherit; 129 | } 130 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal table th, 131 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal table td { 132 | border-color: #f5d06c; 133 | } 134 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal table tr { 135 | color: inherit; 136 | background-color: #fdf6e3; 137 | border-color: #444444; 138 | } 139 | .book.color-theme-1 .book-body .page-wrapper .page-inner section.normal table tr:nth-child(2n) { 140 | background-color: #fbeecb; 141 | } 142 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal { 143 | color: #bdcadb; 144 | } 145 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal a { 146 | color: #3eb1d0; 147 | } 148 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h1, 149 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h2, 150 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h3, 151 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h4, 152 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h5, 153 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h6 { 154 | color: #fffffa; 155 | } 156 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h1, 157 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h2 { 158 | border-color: #373b4e; 159 | } 160 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal h6 { 161 | color: #373b4e; 162 | } 163 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal hr { 164 | background-color: #373b4e; 165 | } 166 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal blockquote { 167 | border-color: #373b4e; 168 | } 169 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal pre, 170 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal code { 171 | color: #9dbed8; 172 | background: #2d3143; 173 | border-color: #2d3143; 174 | } 175 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal .highlight { 176 | background-color: #282a39; 177 | } 178 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal table th, 179 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal table td { 180 | border-color: #3b3f54; 181 | } 182 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal table tr { 183 | color: #b6c2d2; 184 | background-color: #2d3143; 185 | border-color: #3b3f54; 186 | } 187 | .book.color-theme-2 .book-body .page-wrapper .page-inner section.normal table tr:nth-child(2n) { 188 | background-color: #35394b; 189 | } 190 | .book.color-theme-1 .book-header { 191 | color: #afa790; 192 | background: transparent; 193 | } 194 | .book.color-theme-1 .book-header .btn { 195 | color: #afa790; 196 | } 197 | .book.color-theme-1 .book-header .btn:hover { 198 | color: #73553c; 199 | background: none; 200 | } 201 | .book.color-theme-1 .book-header h1 { 202 | color: #704214; 203 | } 204 | .book.color-theme-2 .book-header { 205 | color: #7e888b; 206 | background: 
transparent; 207 | } 208 | .book.color-theme-2 .book-header .btn { 209 | color: #3b3f54; 210 | } 211 | .book.color-theme-2 .book-header .btn:hover { 212 | color: #fffff5; 213 | background: none; 214 | } 215 | .book.color-theme-2 .book-header h1 { 216 | color: #bdcadb; 217 | } 218 | .book.color-theme-1 .book-body .navigation { 219 | color: #afa790; 220 | } 221 | .book.color-theme-1 .book-body .navigation:hover { 222 | color: #73553c; 223 | } 224 | .book.color-theme-2 .book-body .navigation { 225 | color: #383f52; 226 | } 227 | .book.color-theme-2 .book-body .navigation:hover { 228 | color: #fffff5; 229 | } 230 | /* 231 | * Theme 1 232 | */ 233 | .book.color-theme-1 .book-summary { 234 | color: #afa790; 235 | background: #111111; 236 | border-right: 1px solid rgba(0, 0, 0, 0.07); 237 | } 238 | .book.color-theme-1 .book-summary .book-search { 239 | background: transparent; 240 | } 241 | .book.color-theme-1 .book-summary .book-search input, 242 | .book.color-theme-1 .book-summary .book-search input:focus { 243 | border: 1px solid transparent; 244 | } 245 | .book.color-theme-1 .book-summary ul.summary li.divider { 246 | background: #7e888b; 247 | box-shadow: none; 248 | } 249 | .book.color-theme-1 .book-summary ul.summary li i.fa-check { 250 | color: #33cc33; 251 | } 252 | .book.color-theme-1 .book-summary ul.summary li.done > a { 253 | color: #877f6a; 254 | } 255 | .book.color-theme-1 .book-summary ul.summary li a, 256 | .book.color-theme-1 .book-summary ul.summary li span { 257 | color: #877f6a; 258 | background: transparent; 259 | font-weight: normal; 260 | } 261 | .book.color-theme-1 .book-summary ul.summary li.active > a, 262 | .book.color-theme-1 .book-summary ul.summary li a:hover { 263 | color: #704214; 264 | background: transparent; 265 | font-weight: normal; 266 | } 267 | /* 268 | * Theme 2 269 | */ 270 | .book.color-theme-2 .book-summary { 271 | color: #bcc1d2; 272 | background: #2d3143; 273 | border-right: none; 274 | } 275 | .book.color-theme-2 .book-summary .book-search { 276 | background: transparent; 277 | } 278 | .book.color-theme-2 .book-summary .book-search input, 279 | .book.color-theme-2 .book-summary .book-search input:focus { 280 | border: 1px solid transparent; 281 | } 282 | .book.color-theme-2 .book-summary ul.summary li.divider { 283 | background: #272a3a; 284 | box-shadow: none; 285 | } 286 | .book.color-theme-2 .book-summary ul.summary li i.fa-check { 287 | color: #33cc33; 288 | } 289 | .book.color-theme-2 .book-summary ul.summary li.done > a { 290 | color: #62687f; 291 | } 292 | .book.color-theme-2 .book-summary ul.summary li a, 293 | .book.color-theme-2 .book-summary ul.summary li span { 294 | color: #c1c6d7; 295 | background: transparent; 296 | font-weight: 600; 297 | } 298 | .book.color-theme-2 .book-summary ul.summary li.active > a, 299 | .book.color-theme-2 .book-summary ul.summary li a:hover { 300 | color: #f4f4f5; 301 | background: #252737; 302 | font-weight: 600; 303 | } 304 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/css/plugin-search.css: -------------------------------------------------------------------------------- 1 | .book .book-summary .book-search { 2 | padding: 6px; 3 | background: transparent; 4 | position: absolute; 5 | top: -50px; 6 | left: 0px; 7 | right: 0px; 8 | transition: top 0.5s ease; 9 | } 10 | .book .book-summary .book-search input, 11 | .book .book-summary .book-search input:focus, 12 | .book .book-summary .book-search input:hover { 13 | width: 100%; 14 | background: 
transparent; 15 | border: 1px solid #ccc; 16 | box-shadow: none; 17 | outline: none; 18 | line-height: 22px; 19 | padding: 7px 4px; 20 | color: inherit; 21 | box-sizing: border-box; 22 | } 23 | .book.with-search .book-summary .book-search { 24 | top: 0px; 25 | } 26 | .book.with-search .book-summary ul.summary { 27 | top: 50px; 28 | } 29 | .with-search .summary li[data-level] a[href*=".html#"] { 30 | display: none; 31 | } 32 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/css/plugin-table.css: -------------------------------------------------------------------------------- 1 | .book .book-body .page-wrapper .page-inner section.normal table{display:table;width:100%;border-collapse:collapse;border-spacing:0;overflow:auto}.book .book-body .page-wrapper .page-inner section.normal table td,.book .book-body .page-wrapper .page-inner section.normal table th{padding:6px 13px;border:1px solid #ddd}.book .book-body .page-wrapper .page-inner section.normal table tr{background-color:#fff;border-top:1px solid #ccc}.book .book-body .page-wrapper .page-inner section.normal table tr:nth-child(2n){background-color:#f8f8f8}.book .book-body .page-wrapper .page-inner section.normal table th{font-weight:700} 2 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/js/clipboard.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * clipboard.js v2.0.4 3 | * https://zenorocha.github.io/clipboard.js 4 | * 5 | * Licensed MIT © Zeno Rocha 6 | */ 7 | !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return function(n){var o={};function r(t){if(o[t])return o[t].exports;var e=o[t]={i:t,l:!1,exports:{}};return n[t].call(e.exports,e,e.exports,r),e.l=!0,e.exports}return r.m=n,r.c=o,r.d=function(t,e,n){r.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},r.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return r.d(e,"a",e),e},r.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},r.p="",r(r.s=0)}([function(t,e,n){"use strict";var r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},i=function(){function o(t,e){for(var n=0;n indicates arrow keys):', 82 | '/: navigate to previous/next page', 83 | 's: Toggle sidebar']; 84 | if (config.search !== false) info.push('f: Toggle search input ' + 85 | '(use //Enter in the search input to navigate through search matches; ' + 86 | 'press Esc to cancel search)'); 87 | if (config.info !== false) gitbook.toolbar.createButton({ 88 | icon: 'fa fa-info', 89 | label: 'Information about the toolbar', 90 | position: 'left', 91 | onClick: function(e) { 92 | e.preventDefault(); 93 | 
window.alert(info.join('\n\n')); 94 | } 95 | }); 96 | 97 | // highlight the current section in TOC 98 | var href = window.location.pathname; 99 | href = href.substr(href.lastIndexOf('/') + 1); 100 | // accentuated characters need to be decoded (#819) 101 | href = decodeURIComponent(href); 102 | if (href === '') href = 'index.html'; 103 | var li = $('a[href^="' + href + location.hash + '"]').parent('li.chapter').first(); 104 | var summary = $('ul.summary'), chaps = summary.find('li.chapter'); 105 | if (li.length === 0) li = chaps.first(); 106 | li.addClass('active'); 107 | chaps.on('click', function(e) { 108 | chaps.removeClass('active'); 109 | $(this).addClass('active'); 110 | gs.set('tocScrollTop', summary.scrollTop()); 111 | }); 112 | 113 | var toc = config.toc; 114 | // collapse TOC items that are not for the current chapter 115 | if (toc && toc.collapse) (function() { 116 | var type = toc.collapse; 117 | if (type === 'none') return; 118 | if (type !== 'section' && type !== 'subsection') return; 119 | // sections under chapters 120 | var toc_sub = summary.children('li[data-level]').children('ul'); 121 | if (type === 'section') { 122 | toc_sub.hide() 123 | .parent().has(li).children('ul').show(); 124 | } else { 125 | toc_sub.children('li').children('ul').hide() 126 | .parent().has(li).children('ul').show(); 127 | } 128 | li.children('ul').show(); 129 | var toc_sub2 = toc_sub.children('li'); 130 | if (type === 'section') toc_sub2.children('ul').hide(); 131 | summary.children('li[data-level]').find('a') 132 | .on('click.bookdown', function(e) { 133 | if (href === $(this).attr('href').replace(/#.*/, '')) 134 | $(this).parent('li').children('ul').toggle(); 135 | }); 136 | })(); 137 | 138 | // add tooltips to the 's that are truncated 139 | $('a').each(function(i, el) { 140 | if (el.offsetWidth >= el.scrollWidth) return; 141 | if (typeof el.title === 'undefined') return; 142 | el.title = el.text; 143 | }); 144 | 145 | // restore TOC scroll position 146 | var pos = gs.get('tocScrollTop'); 147 | if (typeof pos !== 'undefined') summary.scrollTop(pos); 148 | 149 | // highlight the TOC item that has same text as the heading in view as scrolling 150 | if (toc && toc.scroll_highlight !== false && li.length > 0) (function() { 151 | // scroll the current TOC item into viewport 152 | var ht = $(window).height(), rect = li[0].getBoundingClientRect(); 153 | if (rect.top >= ht || rect.top <= 0 || rect.bottom <= 0) { 154 | summary.scrollTop(li[0].offsetTop); 155 | } 156 | // current chapter TOC items 157 | var items = $('a[href^="' + href + '"]').parent('li.chapter'), 158 | m = items.length; 159 | if (m === 0) { 160 | items = summary.find('li.chapter'); 161 | m = items.length; 162 | } 163 | if (m === 0) return; 164 | // all section titles on current page 165 | var hs = bookInner.find('.page-inner').find('h1,h2,h3'), n = hs.length, 166 | ts = hs.map(function(i, el) { return $(el).text(); }); 167 | if (n === 0) return; 168 | var scrollHandler = function(e) { 169 | var ht = $(window).height(); 170 | clearTimeout($.data(this, 'scrollTimer')); 171 | $.data(this, 'scrollTimer', setTimeout(function() { 172 | // find the first visible title in the viewport 173 | for (var i = 0; i < n; i++) { 174 | var rect = hs[i].getBoundingClientRect(); 175 | if (rect.top >= 0 && rect.bottom <= ht) break; 176 | } 177 | if (i === n) return; 178 | items.removeClass('active'); 179 | for (var j = 0; j < m; j++) { 180 | if (items.eq(j).children('a').first().text() === ts[i]) break; 181 | } 182 | if (j === m) j = 0; // highlight the 
chapter title 183 | // search bottom-up for a visible TOC item to highlight; if an item is 184 | // hidden, we check if its parent is visible, and so on 185 | while (j > 0 && items.eq(j).is(':hidden')) j--; 186 | items.eq(j).addClass('active'); 187 | }, 250)); 188 | }; 189 | bookInner.on('scroll.bookdown', scrollHandler); 190 | bookBody.on('scroll.bookdown', scrollHandler); 191 | })(); 192 | 193 | // do not refresh the page if the TOC item points to the current page 194 | $('a[href="' + href + '"]').parent('li.chapter').children('a') 195 | .on('click', function(e) { 196 | bookInner.scrollTop(0); 197 | bookBody.scrollTop(0); 198 | return false; 199 | }); 200 | 201 | var toolbar = config.toolbar; 202 | if (!toolbar || toolbar.position !== 'static') { 203 | var bookHeader = $('.book-header'); 204 | bookBody.addClass('fixed'); 205 | bookHeader.addClass('fixed') 206 | .css('background-color', bookBody.css('background-color')) 207 | .on('click.bookdown', function(e) { 208 | // the theme may have changed after user clicks the theme button 209 | bookHeader.css('background-color', bookBody.css('background-color')); 210 | }); 211 | } 212 | 213 | }); 214 | 215 | gitbook.events.bind("page.change", function(e) { 216 | // store TOC scroll position 217 | var summary = $('ul.summary'); 218 | gs.set('tocScrollTop', summary.scrollTop()); 219 | }); 220 | 221 | var bookBody = $('.book-body'), bookInner = bookBody.find('.body-inner'); 222 | var chapterTitle = function() { 223 | return bookInner.find('.page-inner').find('h1,h2').first().text(); 224 | }; 225 | var saveScrollPos = function(e) { 226 | // save scroll position before page is reloaded 227 | gs.set('bodyScrollTop', { 228 | body: bookBody.scrollTop(), 229 | inner: bookInner.scrollTop(), 230 | focused: document.hasFocus(), 231 | title: chapterTitle() 232 | }); 233 | }; 234 | $(document).on('servr:reload', saveScrollPos); 235 | 236 | // check if the page is loaded in an iframe (e.g. 
the RStudio preview window) 237 | var inIFrame = function() { 238 | var inIframe = true; 239 | try { inIframe = window.self !== window.top; } catch (e) {} 240 | return inIframe; 241 | }; 242 | if (inIFrame()) { 243 | $(window).on('blur unload', saveScrollPos); 244 | } 245 | 246 | $(function(e) { 247 | var pos = gs.get('bodyScrollTop'); 248 | if (pos) { 249 | if (pos.title === chapterTitle()) { 250 | if (pos.body !== 0) bookBody.scrollTop(pos.body); 251 | if (pos.inner !== 0) bookInner.scrollTop(pos.inner); 252 | } 253 | } 254 | if ((pos && pos.focused) || !inIFrame()) bookInner.find('.page-wrapper').focus(); 255 | // clear book body scroll position 256 | gs.remove('bodyScrollTop'); 257 | }); 258 | 259 | }); 260 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/js/plugin-clipboard.js: -------------------------------------------------------------------------------- 1 | gitbook.require(["gitbook", "jQuery"], function(gitbook, $) { 2 | 3 | var copyButton = ''; 4 | var clipboard; 5 | 6 | gitbook.events.bind("page.change", function() { 7 | 8 | if (!ClipboardJS.isSupported()) return; 9 | 10 | // the page.change event is thrown twice: before and after the page changes 11 | if (clipboard) { 12 | // clipboard is already defined but we are on the same page 13 | if (clipboard._prevPage === window.location.pathname) return; 14 | // clipboard is already defined and url path change 15 | // we can deduct that we are before page changes 16 | clipboard.destroy(); // destroy the previous events listeners 17 | clipboard = undefined; // reset the clipboard object 18 | return; 19 | } 20 | 21 | $(copyButton).prependTo("div.sourceCode"); 22 | 23 | clipboard = new ClipboardJS(".copy-to-clipboard-button", { 24 | text: function(trigger) { 25 | return trigger.parentNode.textContent; 26 | } 27 | }); 28 | 29 | clipboard._prevPage = window.location.pathname 30 | 31 | }); 32 | 33 | }); 34 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/js/plugin-fontsettings.js: -------------------------------------------------------------------------------- 1 | gitbook.require(["gitbook", "lodash", "jQuery"], function(gitbook, _, $) { 2 | var fontState; 3 | 4 | var THEMES = { 5 | "white": 0, 6 | "sepia": 1, 7 | "night": 2 8 | }; 9 | 10 | var FAMILY = { 11 | "serif": 0, 12 | "sans": 1 13 | }; 14 | 15 | // Save current font settings 16 | function saveFontSettings() { 17 | gitbook.storage.set("fontState", fontState); 18 | update(); 19 | } 20 | 21 | // Increase font size 22 | function enlargeFontSize(e) { 23 | e.preventDefault(); 24 | if (fontState.size >= 4) return; 25 | 26 | fontState.size++; 27 | saveFontSettings(); 28 | }; 29 | 30 | // Decrease font size 31 | function reduceFontSize(e) { 32 | e.preventDefault(); 33 | if (fontState.size <= 0) return; 34 | 35 | fontState.size--; 36 | saveFontSettings(); 37 | }; 38 | 39 | // Change font family 40 | function changeFontFamily(index, e) { 41 | e.preventDefault(); 42 | 43 | fontState.family = index; 44 | saveFontSettings(); 45 | }; 46 | 47 | // Change type of color 48 | function changeColorTheme(index, e) { 49 | e.preventDefault(); 50 | 51 | var $book = $(".book"); 52 | 53 | if (fontState.theme !== 0) 54 | $book.removeClass("color-theme-"+fontState.theme); 55 | 56 | fontState.theme = index; 57 | if (fontState.theme !== 0) 58 | $book.addClass("color-theme-"+fontState.theme); 59 | 60 | saveFontSettings(); 61 | }; 62 | 63 | function update() { 64 | var $book = gitbook.state.$book; 
65 | 66 | $(".font-settings .font-family-list li").removeClass("active"); 67 | $(".font-settings .font-family-list li:nth-child("+(fontState.family+1)+")").addClass("active"); 68 | 69 | $book[0].className = $book[0].className.replace(/\bfont-\S+/g, ''); 70 | $book.addClass("font-size-"+fontState.size); 71 | $book.addClass("font-family-"+fontState.family); 72 | 73 | if(fontState.theme !== 0) { 74 | $book[0].className = $book[0].className.replace(/\bcolor-theme-\S+/g, ''); 75 | $book.addClass("color-theme-"+fontState.theme); 76 | } 77 | }; 78 | 79 | function init(config) { 80 | var $bookBody, $book; 81 | 82 | //Find DOM elements. 83 | $book = gitbook.state.$book; 84 | $bookBody = $book.find(".book-body"); 85 | 86 | // Instantiate font state object 87 | fontState = gitbook.storage.get("fontState", { 88 | size: config.size || 2, 89 | family: FAMILY[config.family || "sans"], 90 | theme: THEMES[config.theme || "white"] 91 | }); 92 | 93 | update(); 94 | }; 95 | 96 | 97 | gitbook.events.bind("start", function(e, config) { 98 | var opts = config.fontsettings; 99 | if (!opts) return; 100 | 101 | // Create buttons in toolbar 102 | gitbook.toolbar.createButton({ 103 | icon: 'fa fa-font', 104 | label: 'Font Settings', 105 | className: 'font-settings', 106 | dropdown: [ 107 | [ 108 | { 109 | text: 'A', 110 | className: 'font-reduce', 111 | onClick: reduceFontSize 112 | }, 113 | { 114 | text: 'A', 115 | className: 'font-enlarge', 116 | onClick: enlargeFontSize 117 | } 118 | ], 119 | [ 120 | { 121 | text: 'Serif', 122 | onClick: _.partial(changeFontFamily, 0) 123 | }, 124 | { 125 | text: 'Sans', 126 | onClick: _.partial(changeFontFamily, 1) 127 | } 128 | ], 129 | [ 130 | { 131 | text: 'White', 132 | onClick: _.partial(changeColorTheme, 0) 133 | }, 134 | { 135 | text: 'Sepia', 136 | onClick: _.partial(changeColorTheme, 1) 137 | }, 138 | { 139 | text: 'Night', 140 | onClick: _.partial(changeColorTheme, 2) 141 | } 142 | ] 143 | ] 144 | }); 145 | 146 | 147 | // Init current settings 148 | init(opts); 149 | }); 150 | }); 151 | 152 | 153 | -------------------------------------------------------------------------------- /docs/libs/gitbook-2.6.7/js/plugin-search.js: -------------------------------------------------------------------------------- 1 | gitbook.require(["gitbook", "lodash", "jQuery"], function(gitbook, _, $) { 2 | var index = null; 3 | var fuse = null; 4 | var _search = {engine: 'lunr', opts: {}}; 5 | var $searchInput, $searchLabel, $searchForm; 6 | var $highlighted = [], hi, hiOpts = { className: 'search-highlight' }; 7 | var collapse = false, toc_visible = []; 8 | 9 | function init(config) { 10 | // Instantiate search settings 11 | _search = gitbook.storage.get("search", { 12 | engine: config.search.engine || 'lunr', 13 | opts: config.search.options || {}, 14 | }); 15 | }; 16 | 17 | // Save current search settings 18 | function saveSearchSettings() { 19 | gitbook.storage.set("search", _search); 20 | } 21 | 22 | // Use a specific index 23 | function loadIndex(data) { 24 | // [Yihui] In bookdown, I use a character matrix to store the chapter 25 | // content, and the index is dynamically built on the client side. 26 | // Gitbook prebuilds the index data instead: https://github.com/GitbookIO/plugin-search 27 | // We can certainly do that via R packages V8 and jsonlite, but let's 28 | // see how slow it really is before improving it. On the other hand, 29 | // lunr cannot handle non-English text very well, e.g. 
the default 30 | // tokenizer cannot deal with Chinese text, so we may want to replace 31 | // lunr with a dumb simple text matching approach. 32 | if (_search.engine === 'lunr') { 33 | index = lunr(function () { 34 | this.ref('url'); 35 | this.field('title', { boost: 10 }); 36 | this.field('body'); 37 | }); 38 | data.map(function(item) { 39 | index.add({ 40 | url: item[0], 41 | title: item[1], 42 | body: item[2] 43 | }); 44 | }); 45 | return; 46 | } 47 | fuse = new Fuse(data.map((_data => { 48 | return { 49 | url: _data[0], 50 | title: _data[1], 51 | body: _data[2] 52 | }; 53 | })), Object.assign( 54 | { 55 | includeScore: true, 56 | threshold: 0.1, 57 | ignoreLocation: true, 58 | keys: ["title", "body"] 59 | }, 60 | _search.opts 61 | )); 62 | } 63 | 64 | // Fetch the search index 65 | function fetchIndex() { 66 | return $.getJSON(gitbook.state.basePath+"/search_index.json") 67 | .then(loadIndex); // [Yihui] we need to use this object later 68 | } 69 | 70 | // Search for a term and return results 71 | function search(q) { 72 | let results = []; 73 | switch (_search.engine) { 74 | case 'fuse': 75 | if (!fuse) return; 76 | results = fuse.search(q).map(function(result) { 77 | var parts = result.item.url.split('#'); 78 | return { 79 | path: parts[0], 80 | hash: parts[1] 81 | }; 82 | }); 83 | break; 84 | case 'lunr': 85 | default: 86 | if (!index) return; 87 | results = _.chain(index.search(q)).map(function(result) { 88 | var parts = result.ref.split("#"); 89 | return { 90 | path: parts[0], 91 | hash: parts[1] 92 | }; 93 | }) 94 | .value(); 95 | } 96 | 97 | // [Yihui] Highlight the search keyword on current page 98 | $highlighted = $('.page-inner') 99 | .unhighlight(hiOpts).highlight(q, hiOpts).find('span.search-highlight'); 100 | scrollToHighlighted(0); 101 | 102 | return results; 103 | } 104 | 105 | // [Yihui] Scroll the chapter body to the i-th highlighted string 106 | function scrollToHighlighted(d) { 107 | var n = $highlighted.length; 108 | hi = hi === undefined ? 0 : hi + d; 109 | // navignate to the previous/next page in the search results if reached the top/bottom 110 | var b = hi < 0; 111 | if (d !== 0 && (b || hi >= n)) { 112 | var path = currentPath(), n2 = toc_visible.length; 113 | if (n2 === 0) return; 114 | for (var i = b ? 0 : n2; (b && i < n2) || (!b && i >= 0); i += b ? 1 : -1) { 115 | if (toc_visible.eq(i).data('path') === path) break; 116 | } 117 | i += b ? -1 : 1; 118 | if (i < 0) i = n2 - 1; 119 | if (i >= n2) i = 0; 120 | var lnk = toc_visible.eq(i).find('a[href$=".html"]'); 121 | if (lnk.length) lnk[0].click(); 122 | return; 123 | } 124 | if (n === 0) return; 125 | var $p = $highlighted.eq(hi); 126 | $p[0].scrollIntoView(); 127 | $highlighted.css('background-color', ''); 128 | // an orange background color on the current item and removed later 129 | $p.css('background-color', 'orange'); 130 | setTimeout(function() { 131 | $p.css('background-color', ''); 132 | }, 2000); 133 | } 134 | 135 | function currentPath() { 136 | var href = window.location.pathname; 137 | href = href.substr(href.lastIndexOf('/') + 1); 138 | return href === '' ? 'index.html' : href; 139 | } 140 | 141 | // Create search form 142 | function createForm(value) { 143 | if ($searchForm) $searchForm.remove(); 144 | if ($searchLabel) $searchLabel.remove(); 145 | if ($searchInput) $searchInput.remove(); 146 | 147 | $searchForm = $('
    ', { 148 | 'class': 'book-search', 149 | 'role': 'search' 150 | }); 151 | 152 | $searchLabel = $('