├── .Rbuildignore ├── .github ├── .gitignore └── workflows │ ├── R-CMD-check.yaml │ ├── pkgdown.yaml │ └── pr-commands.yaml ├── .gitignore ├── DESCRIPTION ├── LICENSE.md ├── NAMESPACE ├── NEWS.md ├── R ├── data.R ├── doughut.R ├── from_stplanr.R ├── helper_functions.R ├── labels.R ├── plot.R ├── quadrat.R ├── segment.R ├── utils.R ├── zb_lines.R └── zone.R ├── README.Rmd ├── README.md ├── _pkgdown.yml ├── cran-comments.md ├── data-raw ├── London_examples.R ├── crashes.R ├── create_NLD_cities.R ├── fix-polygons.R ├── london-figures.R └── london.R ├── data ├── london_area.rda ├── london_area_lonlat.rda ├── london_cent.rda ├── london_cent_lonlat.rda └── zb_100_triangular_numbers.rda ├── inst └── test-cities.R ├── man ├── figures │ ├── README-unnamed-chunk-2-1.png │ ├── README-unnamed-chunk-3-1.png │ ├── README-unnamed-chunk-4-1.png │ ├── README-unnamed-chunk-4-2.png │ ├── README-unnamed-chunk-5-1.png │ ├── README-unnamed-chunk-5-2.png │ ├── README-unnamed-chunk-5-3.png │ ├── README-unnamed-chunk-6-1.png │ ├── README-unnamed-chunk-7-1.png │ └── README-unnamed-chunk-8-1.png ├── geo_select_aeq.Rd ├── london_area.Rd ├── zb_100_triangular_numbers.Rd ├── zb_color.Rd ├── zb_doughnut.Rd ├── zb_lines.Rd ├── zb_plot.Rd ├── zb_quadrat.Rd ├── zb_segment.Rd └── zb_zone.Rd ├── paper ├── figure_cycling_accidents.R ├── figure_world_cities.R ├── foss4g │ └── foss4g-zonebuilder-abstract.md └── tables.R ├── sandbox └── Dutch_city_population.R ├── tic.R ├── vignettes ├── .gitignore ├── demo_dutch_cities.Rmd ├── hackathon.Rmd ├── paper.Rmd └── references.bib └── zonebuilder.Rproj /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | ^README\.Rmd$ 4 | ^LICENSE\.md$ 5 | ^data-raw$ 6 | ^paper$ 7 | ^sandbox$ 8 | ^_pkgdown\.yml$ 9 | ^docs$ 10 | ^pkgdown$ 11 | ^\.travis\.yml$ 12 | ^tic\.R$ 13 | ^codecov\.yml$ 14 | ^\.github$ 15 | ^cran-comments\.md$ 16 | ^CRAN-RELEASE$ 17 | ^zonebuilder-paper\.pdf$ 18 | ^zonebuilder-paper\.tex$ 19 | ^zonebuilder-paper_files$ 20 | ^vignettes/josis*$ 21 | ^\.ccache$ 22 | ^CRAN-SUBMISSION$ 23 | -------------------------------------------------------------------------------- /.github/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check.yaml: -------------------------------------------------------------------------------- 1 | # For help debugging build failures open an issue on the RStudio community with the 'github-actions' tag. 
2 | # https://community.rstudio.com/new-topic?category=Package%20development&tags=github-actions 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - master 8 | pull_request: 9 | branches: 10 | - main 11 | - master 12 | 13 | name: R-CMD-check 14 | 15 | jobs: 16 | R-CMD-check: 17 | runs-on: ${{ matrix.config.os }} 18 | 19 | name: ${{ matrix.config.os }} (${{ matrix.config.r }}) 20 | 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | config: 25 | - {os: windows-latest, r: 'release'} 26 | # - {os: macOS-latest, r: 'release'} 27 | - {os: ubuntu-20.04, r: 'release', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} 28 | - {os: ubuntu-20.04, r: 'devel', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest", http-user-agent: "R/4.1.0 (ubuntu-20.04) R (4.1.0 x86_64-pc-linux-gnu x86_64 linux-gnu) on GitHub Actions" } 29 | 30 | env: 31 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 32 | RSPM: ${{ matrix.config.rspm }} 33 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 34 | 35 | steps: 36 | - uses: actions/checkout@v2 37 | 38 | - uses: r-lib/actions/setup-r@v1 39 | with: 40 | r-version: ${{ matrix.config.r }} 41 | 42 | - uses: r-lib/actions/setup-pandoc@v1 43 | 44 | - name: Query dependencies 45 | run: | 46 | install.packages('remotes') 47 | saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2) 48 | writeLines(sprintf("R-%i.%i", getRversion()$major, getRversion()$minor), ".github/R-version") 49 | shell: Rscript {0} 50 | 51 | - name: Restore R package cache 52 | uses: actions/cache@v2 53 | with: 54 | path: ${{ env.R_LIBS_USER }} 55 | key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('.github/depends.Rds') }} 56 | restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1- 57 | 58 | - name: Install system dependencies 59 | if: runner.os == 'Linux' 60 | run: | 61 | while read -r cmd 62 | do 63 | eval sudo $cmd 64 | done < <(Rscript -e 'writeLines(remotes::system_requirements("ubuntu", "20.04"))') 65 | 66 | - name: Install dependencies 67 | run: | 68 | remotes::install_deps(dependencies = TRUE) 69 | remotes::install_cran("rcmdcheck") 70 | shell: Rscript {0} 71 | 72 | - name: Check 73 | env: 74 | _R_CHECK_CRAN_INCOMING_REMOTE_: false 75 | run: | 76 | options(crayon.enabled = TRUE) 77 | rcmdcheck::rcmdcheck(args = c("--no-manual", "--as-cran"), error_on = "warning", check_dir = "check") 78 | shell: Rscript {0} 79 | 80 | - name: Upload check results 81 | if: failure() 82 | uses: actions/upload-artifact@main 83 | with: 84 | name: ${{ runner.os }}-r${{ matrix.config.r }}-results 85 | path: check 86 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | release: 8 | types: [published] 9 | workflow_dispatch: 10 | 11 | name: pkgdown.yaml 12 | 13 | permissions: read-all 14 | 15 | jobs: 16 | pkgdown: 17 | runs-on: ubuntu-latest 18 | # Only restrict concurrency for non-PR jobs 19 | concurrency: 20 | group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} 21 | env: 22 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 23 | permissions: 24 | contents: write 25 | steps: 26 | - uses: actions/checkout@v4 27 | 28 | - uses: r-lib/actions/setup-pandoc@v2 29 | 30 | - uses: r-lib/actions/setup-r@v2 31 | with: 32 | use-public-rspm: true 33 | 34 | - uses: r-lib/actions/setup-r-dependencies@v2 35 | with: 36 | extra-packages: any::pkgdown, local::. 37 | needs: website 38 | 39 | - name: Build site 40 | run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) 41 | shell: Rscript {0} 42 | 43 | - name: Deploy to GitHub pages 🚀 44 | if: github.event_name != 'pull_request' 45 | uses: JamesIves/github-pages-deploy-action@v4.5.0 46 | with: 47 | clean: false 48 | branch: gh-pages 49 | folder: docs 50 | -------------------------------------------------------------------------------- /.github/workflows/pr-commands.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | issue_comment: 3 | types: [created] 4 | name: Commands 5 | jobs: 6 | document: 7 | if: startsWith(github.event.comment.body, '/document') 8 | name: document 9 | runs-on: macOS-latest 10 | env: 11 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 12 | steps: 13 | - uses: actions/checkout@v2 14 | - uses: r-lib/actions/pr-fetch@v1 15 | with: 16 | repo-token: ${{ secrets.GITHUB_TOKEN }} 17 | - uses: r-lib/actions/setup-r@v1 18 | - name: Install dependencies 19 | run: Rscript -e 'install.packages(c("remotes", "roxygen2"))' -e 'remotes::install_deps(dependencies = TRUE)' 20 | - name: Document 21 | run: Rscript -e 'roxygen2::roxygenise()' 22 | - name: commit 23 | run: | 24 | git config --local user.email "actions@github.com" 25 | git config --local user.name "GitHub Actions" 26 | git add man/\* NAMESPACE 27 | git commit -m 'Document' 28 | - uses: r-lib/actions/pr-push@v1 29 | with: 30 | repo-token: ${{ secrets.GITHUB_TOKEN }} 31 | style: 32 | if: startsWith(github.event.comment.body, '/style') 33 | name: style 34 | runs-on: macOS-latest 35 | env: 36 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 37 | steps: 38 | - uses: actions/checkout@v2 39 | - uses: r-lib/actions/pr-fetch@v1 40 | with: 41 | repo-token: ${{ secrets.GITHUB_TOKEN }} 42 | - uses: r-lib/actions/setup-r@v1 43 | - name: Install dependencies 44 | run: Rscript -e 'install.packages("styler")' 45 | - name: Style 46 | run: Rscript -e 'styler::style_pkg()' 47 | - name: commit 48 | run: | 49 | git config --local user.email "actions@github.com" 50 | git config --local user.name "GitHub Actions" 51 | git add \*.R 52 | git commit -m 'Style' 53 | - uses: r-lib/actions/pr-push@v1 54 | with: 55 | repo-token: ${{ secrets.GITHUB_TOKEN }} 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .Ruserdata 5 | inst/doc 6 | paper/figures 7 | docs/ 8 | *.Rds 9 | *.zip 10 | vignettes/josis* 11 | *.log 12 | *.pdf 13 | *.tex 14 | *.rds 15 | data-raw/2* 16 | *cache* 17 | 
-------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: zonebuilder 2 | Title: Create and Explore Geographic Zoning Systems 3 | Version: 0.1.0 4 | Authors@R: 5 | c(person(given = "Robin", 6 | family = "Lovelace", 7 | role = c("aut", "cre"), 8 | email = "rob00x@gmail.com", 9 | comment = c(ORCID = "0000-0001-5679-6536")), 10 | person(given = "Martijn", 11 | family = "Tennekes", 12 | role = "aut", 13 | email = "mtennekes@gmail.com")) 14 | Description: Functions, documentation and example data to help divide 15 | geographic space into discrete polygons (zones). 16 | The package supports new zoning systems that are documented in the 17 | accompanying paper, 18 | "ClockBoard: A zoning system for urban analysis", 19 | by Lovelace et al. (2022) . 20 | The functions are motivated by research into the merits of different zoning systems 21 | (Openshaw, 1977) . A flexible ClockBoard zoning system is 22 | provided, which breaks-up space by concentric rings 23 | and radial lines emanating from a central point. 24 | By default, the diameter of the rings grow according to the triangular number sequence 25 | (Ross & Knott, 2019) with the first 4 doughnuts 26 | (or annuli) measuring 1, 3, 6, and 10 km wide. 27 | These annuli are subdivided into equal segments (12 by default), creating the 28 | visual impression of a dartboard. Zones are labelled according to 29 | distance to the centre and angular distance from North, creating a simple 30 | geographic zoning and labelling system useful for visualising geographic 31 | phenomena with a clearly demarcated central location such as cities. 32 | License: GPL-3 33 | BugReports: https://github.com/zonebuilders/zonebuilder/issues 34 | Depends: 35 | R (>= 3.5.0) 36 | Imports: 37 | sf, 38 | RColorBrewer, 39 | graphics, 40 | grDevices 41 | Suggests: 42 | knitr, 43 | rmarkdown, 44 | tmap, 45 | tmaptools, 46 | dplyr, 47 | lwgeom, 48 | leaflet, 49 | covr, 50 | bookdown 51 | VignetteBuilder: 52 | knitr 53 | URL: https://github.com/zonebuilders/zonebuilder, https://zonebuilders.github.io/zonebuilder/ 54 | Encoding: UTF-8 55 | LazyData: true 56 | RoxygenNote: 7.3.2 57 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | GNU General Public License 2 | ========================== 3 | 4 | _Version 3, 29 June 2007_ 5 | _Copyright © 2007 Free Software Foundation, Inc. <>_ 6 | 7 | Everyone is permitted to copy and distribute verbatim copies of this license 8 | document, but changing it is not allowed. 9 | 10 | ## Preamble 11 | 12 | The GNU General Public License is a free, copyleft license for software and other 13 | kinds of works. 14 | 15 | The licenses for most software and other practical works are designed to take away 16 | your freedom to share and change the works. By contrast, the GNU General Public 17 | License is intended to guarantee your freedom to share and change all versions of a 18 | program--to make sure it remains free software for all its users. We, the Free 19 | Software Foundation, use the GNU General Public License for most of our software; it 20 | applies also to any other work released this way by its authors. You can apply it to 21 | your programs, too. 22 | 23 | When we speak of free software, we are referring to freedom, not price. 
Our General 24 | Public Licenses are designed to make sure that you have the freedom to distribute 25 | copies of free software (and charge for them if you wish), that you receive source 26 | code or can get it if you want it, that you can change the software or use pieces of 27 | it in new free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you these rights or 30 | asking you to surrender the rights. Therefore, you have certain responsibilities if 31 | you distribute copies of the software, or if you modify it: responsibilities to 32 | respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether gratis or for a fee, 35 | you must pass on to the recipients the same freedoms that you received. You must make 36 | sure that they, too, receive or can get the source code. And you must show them these 37 | terms so they know their rights. 38 | 39 | Developers that use the GNU GPL protect your rights with two steps: **(1)** assert 40 | copyright on the software, and **(2)** offer you this License giving you legal permission 41 | to copy, distribute and/or modify it. 42 | 43 | For the developers' and authors' protection, the GPL clearly explains that there is 44 | no warranty for this free software. For both users' and authors' sake, the GPL 45 | requires that modified versions be marked as changed, so that their problems will not 46 | be attributed erroneously to authors of previous versions. 47 | 48 | Some devices are designed to deny users access to install or run modified versions of 49 | the software inside them, although the manufacturer can do so. This is fundamentally 50 | incompatible with the aim of protecting users' freedom to change the software. The 51 | systematic pattern of such abuse occurs in the area of products for individuals to 52 | use, which is precisely where it is most unacceptable. Therefore, we have designed 53 | this version of the GPL to prohibit the practice for those products. If such problems 54 | arise substantially in other domains, we stand ready to extend this provision to 55 | those domains in future versions of the GPL, as needed to protect the freedom of 56 | users. 57 | 58 | Finally, every program is threatened constantly by software patents. States should 59 | not allow patents to restrict development and use of software on general-purpose 60 | computers, but in those that do, we wish to avoid the special danger that patents 61 | applied to a free program could make it effectively proprietary. To prevent this, the 62 | GPL assures that patents cannot be used to render the program non-free. 63 | 64 | The precise terms and conditions for copying, distribution and modification follow. 65 | 66 | ## TERMS AND CONDITIONS 67 | 68 | ### 0. Definitions 69 | 70 | “This License” refers to version 3 of the GNU General Public License. 71 | 72 | “Copyright” also means copyright-like laws that apply to other kinds of 73 | works, such as semiconductor masks. 74 | 75 | “The Program” refers to any copyrightable work licensed under this 76 | License. Each licensee is addressed as “you”. “Licensees” and 77 | “recipients” may be individuals or organizations. 78 | 79 | To “modify” a work means to copy from or adapt all or part of the work in 80 | a fashion requiring copyright permission, other than the making of an exact copy. The 81 | resulting work is called a “modified version” of the earlier work or a 82 | work “based on” the earlier work. 
83 | 84 | A “covered work” means either the unmodified Program or a work based on 85 | the Program. 86 | 87 | To “propagate” a work means to do anything with it that, without 88 | permission, would make you directly or secondarily liable for infringement under 89 | applicable copyright law, except executing it on a computer or modifying a private 90 | copy. Propagation includes copying, distribution (with or without modification), 91 | making available to the public, and in some countries other activities as well. 92 | 93 | To “convey” a work means any kind of propagation that enables other 94 | parties to make or receive copies. Mere interaction with a user through a computer 95 | network, with no transfer of a copy, is not conveying. 96 | 97 | An interactive user interface displays “Appropriate Legal Notices” to the 98 | extent that it includes a convenient and prominently visible feature that **(1)** 99 | displays an appropriate copyright notice, and **(2)** tells the user that there is no 100 | warranty for the work (except to the extent that warranties are provided), that 101 | licensees may convey the work under this License, and how to view a copy of this 102 | License. If the interface presents a list of user commands or options, such as a 103 | menu, a prominent item in the list meets this criterion. 104 | 105 | ### 1. Source Code 106 | 107 | The “source code” for a work means the preferred form of the work for 108 | making modifications to it. “Object code” means any non-source form of a 109 | work. 110 | 111 | A “Standard Interface” means an interface that either is an official 112 | standard defined by a recognized standards body, or, in the case of interfaces 113 | specified for a particular programming language, one that is widely used among 114 | developers working in that language. 115 | 116 | The “System Libraries” of an executable work include anything, other than 117 | the work as a whole, that **(a)** is included in the normal form of packaging a Major 118 | Component, but which is not part of that Major Component, and **(b)** serves only to 119 | enable use of the work with that Major Component, or to implement a Standard 120 | Interface for which an implementation is available to the public in source code form. 121 | A “Major Component”, in this context, means a major essential component 122 | (kernel, window system, and so on) of the specific operating system (if any) on which 123 | the executable work runs, or a compiler used to produce the work, or an object code 124 | interpreter used to run it. 125 | 126 | The “Corresponding Source” for a work in object code form means all the 127 | source code needed to generate, install, and (for an executable work) run the object 128 | code and to modify the work, including scripts to control those activities. However, 129 | it does not include the work's System Libraries, or general-purpose tools or 130 | generally available free programs which are used unmodified in performing those 131 | activities but which are not part of the work. For example, Corresponding Source 132 | includes interface definition files associated with source files for the work, and 133 | the source code for shared libraries and dynamically linked subprograms that the work 134 | is specifically designed to require, such as by intimate data communication or 135 | control flow between those subprograms and other parts of the work. 
136 | 137 | The Corresponding Source need not include anything that users can regenerate 138 | automatically from other parts of the Corresponding Source. 139 | 140 | The Corresponding Source for a work in source code form is that same work. 141 | 142 | ### 2. Basic Permissions 143 | 144 | All rights granted under this License are granted for the term of copyright on the 145 | Program, and are irrevocable provided the stated conditions are met. This License 146 | explicitly affirms your unlimited permission to run the unmodified Program. The 147 | output from running a covered work is covered by this License only if the output, 148 | given its content, constitutes a covered work. This License acknowledges your rights 149 | of fair use or other equivalent, as provided by copyright law. 150 | 151 | You may make, run and propagate covered works that you do not convey, without 152 | conditions so long as your license otherwise remains in force. You may convey covered 153 | works to others for the sole purpose of having them make modifications exclusively 154 | for you, or provide you with facilities for running those works, provided that you 155 | comply with the terms of this License in conveying all material for which you do not 156 | control copyright. Those thus making or running the covered works for you must do so 157 | exclusively on your behalf, under your direction and control, on terms that prohibit 158 | them from making any copies of your copyrighted material outside their relationship 159 | with you. 160 | 161 | Conveying under any other circumstances is permitted solely under the conditions 162 | stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 163 | 164 | ### 3. Protecting Users' Legal Rights From Anti-Circumvention Law 165 | 166 | No covered work shall be deemed part of an effective technological measure under any 167 | applicable law fulfilling obligations under article 11 of the WIPO copyright treaty 168 | adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention 169 | of such measures. 170 | 171 | When you convey a covered work, you waive any legal power to forbid circumvention of 172 | technological measures to the extent such circumvention is effected by exercising 173 | rights under this License with respect to the covered work, and you disclaim any 174 | intention to limit operation or modification of the work as a means of enforcing, 175 | against the work's users, your or third parties' legal rights to forbid circumvention 176 | of technological measures. 177 | 178 | ### 4. Conveying Verbatim Copies 179 | 180 | You may convey verbatim copies of the Program's source code as you receive it, in any 181 | medium, provided that you conspicuously and appropriately publish on each copy an 182 | appropriate copyright notice; keep intact all notices stating that this License and 183 | any non-permissive terms added in accord with section 7 apply to the code; keep 184 | intact all notices of the absence of any warranty; and give all recipients a copy of 185 | this License along with the Program. 186 | 187 | You may charge any price or no price for each copy that you convey, and you may offer 188 | support or warranty protection for a fee. 189 | 190 | ### 5. 
Conveying Modified Source Versions 191 | 192 | You may convey a work based on the Program, or the modifications to produce it from 193 | the Program, in the form of source code under the terms of section 4, provided that 194 | you also meet all of these conditions: 195 | 196 | * **a)** The work must carry prominent notices stating that you modified it, and giving a 197 | relevant date. 198 | * **b)** The work must carry prominent notices stating that it is released under this 199 | License and any conditions added under section 7. This requirement modifies the 200 | requirement in section 4 to “keep intact all notices”. 201 | * **c)** You must license the entire work, as a whole, under this License to anyone who 202 | comes into possession of a copy. This License will therefore apply, along with any 203 | applicable section 7 additional terms, to the whole of the work, and all its parts, 204 | regardless of how they are packaged. This License gives no permission to license the 205 | work in any other way, but it does not invalidate such permission if you have 206 | separately received it. 207 | * **d)** If the work has interactive user interfaces, each must display Appropriate Legal 208 | Notices; however, if the Program has interactive interfaces that do not display 209 | Appropriate Legal Notices, your work need not make them do so. 210 | 211 | A compilation of a covered work with other separate and independent works, which are 212 | not by their nature extensions of the covered work, and which are not combined with 213 | it such as to form a larger program, in or on a volume of a storage or distribution 214 | medium, is called an “aggregate” if the compilation and its resulting 215 | copyright are not used to limit the access or legal rights of the compilation's users 216 | beyond what the individual works permit. Inclusion of a covered work in an aggregate 217 | does not cause this License to apply to the other parts of the aggregate. 218 | 219 | ### 6. Conveying Non-Source Forms 220 | 221 | You may convey a covered work in object code form under the terms of sections 4 and 222 | 5, provided that you also convey the machine-readable Corresponding Source under the 223 | terms of this License, in one of these ways: 224 | 225 | * **a)** Convey the object code in, or embodied in, a physical product (including a 226 | physical distribution medium), accompanied by the Corresponding Source fixed on a 227 | durable physical medium customarily used for software interchange. 228 | * **b)** Convey the object code in, or embodied in, a physical product (including a 229 | physical distribution medium), accompanied by a written offer, valid for at least 230 | three years and valid for as long as you offer spare parts or customer support for 231 | that product model, to give anyone who possesses the object code either **(1)** a copy of 232 | the Corresponding Source for all the software in the product that is covered by this 233 | License, on a durable physical medium customarily used for software interchange, for 234 | a price no more than your reasonable cost of physically performing this conveying of 235 | source, or **(2)** access to copy the Corresponding Source from a network server at no 236 | charge. 237 | * **c)** Convey individual copies of the object code with a copy of the written offer to 238 | provide the Corresponding Source. 
This alternative is allowed only occasionally and 239 | noncommercially, and only if you received the object code with such an offer, in 240 | accord with subsection 6b. 241 | * **d)** Convey the object code by offering access from a designated place (gratis or for 242 | a charge), and offer equivalent access to the Corresponding Source in the same way 243 | through the same place at no further charge. You need not require recipients to copy 244 | the Corresponding Source along with the object code. If the place to copy the object 245 | code is a network server, the Corresponding Source may be on a different server 246 | (operated by you or a third party) that supports equivalent copying facilities, 247 | provided you maintain clear directions next to the object code saying where to find 248 | the Corresponding Source. Regardless of what server hosts the Corresponding Source, 249 | you remain obligated to ensure that it is available for as long as needed to satisfy 250 | these requirements. 251 | * **e)** Convey the object code using peer-to-peer transmission, provided you inform 252 | other peers where the object code and Corresponding Source of the work are being 253 | offered to the general public at no charge under subsection 6d. 254 | 255 | A separable portion of the object code, whose source code is excluded from the 256 | Corresponding Source as a System Library, need not be included in conveying the 257 | object code work. 258 | 259 | A “User Product” is either **(1)** a “consumer product”, which 260 | means any tangible personal property which is normally used for personal, family, or 261 | household purposes, or **(2)** anything designed or sold for incorporation into a 262 | dwelling. In determining whether a product is a consumer product, doubtful cases 263 | shall be resolved in favor of coverage. For a particular product received by a 264 | particular user, “normally used” refers to a typical or common use of 265 | that class of product, regardless of the status of the particular user or of the way 266 | in which the particular user actually uses, or expects or is expected to use, the 267 | product. A product is a consumer product regardless of whether the product has 268 | substantial commercial, industrial or non-consumer uses, unless such uses represent 269 | the only significant mode of use of the product. 270 | 271 | “Installation Information” for a User Product means any methods, 272 | procedures, authorization keys, or other information required to install and execute 273 | modified versions of a covered work in that User Product from a modified version of 274 | its Corresponding Source. The information must suffice to ensure that the continued 275 | functioning of the modified object code is in no case prevented or interfered with 276 | solely because modification has been made. 277 | 278 | If you convey an object code work under this section in, or with, or specifically for 279 | use in, a User Product, and the conveying occurs as part of a transaction in which 280 | the right of possession and use of the User Product is transferred to the recipient 281 | in perpetuity or for a fixed term (regardless of how the transaction is 282 | characterized), the Corresponding Source conveyed under this section must be 283 | accompanied by the Installation Information. But this requirement does not apply if 284 | neither you nor any third party retains the ability to install modified object code 285 | on the User Product (for example, the work has been installed in ROM). 
286 | 287 | The requirement to provide Installation Information does not include a requirement to 288 | continue to provide support service, warranty, or updates for a work that has been 289 | modified or installed by the recipient, or for the User Product in which it has been 290 | modified or installed. Access to a network may be denied when the modification itself 291 | materially and adversely affects the operation of the network or violates the rules 292 | and protocols for communication across the network. 293 | 294 | Corresponding Source conveyed, and Installation Information provided, in accord with 295 | this section must be in a format that is publicly documented (and with an 296 | implementation available to the public in source code form), and must require no 297 | special password or key for unpacking, reading or copying. 298 | 299 | ### 7. Additional Terms 300 | 301 | “Additional permissions” are terms that supplement the terms of this 302 | License by making exceptions from one or more of its conditions. Additional 303 | permissions that are applicable to the entire Program shall be treated as though they 304 | were included in this License, to the extent that they are valid under applicable 305 | law. If additional permissions apply only to part of the Program, that part may be 306 | used separately under those permissions, but the entire Program remains governed by 307 | this License without regard to the additional permissions. 308 | 309 | When you convey a copy of a covered work, you may at your option remove any 310 | additional permissions from that copy, or from any part of it. (Additional 311 | permissions may be written to require their own removal in certain cases when you 312 | modify the work.) You may place additional permissions on material, added by you to a 313 | covered work, for which you have or can give appropriate copyright permission. 314 | 315 | Notwithstanding any other provision of this License, for material you add to a 316 | covered work, you may (if authorized by the copyright holders of that material) 317 | supplement the terms of this License with terms: 318 | 319 | * **a)** Disclaiming warranty or limiting liability differently from the terms of 320 | sections 15 and 16 of this License; or 321 | * **b)** Requiring preservation of specified reasonable legal notices or author 322 | attributions in that material or in the Appropriate Legal Notices displayed by works 323 | containing it; or 324 | * **c)** Prohibiting misrepresentation of the origin of that material, or requiring that 325 | modified versions of such material be marked in reasonable ways as different from the 326 | original version; or 327 | * **d)** Limiting the use for publicity purposes of names of licensors or authors of the 328 | material; or 329 | * **e)** Declining to grant rights under trademark law for use of some trade names, 330 | trademarks, or service marks; or 331 | * **f)** Requiring indemnification of licensors and authors of that material by anyone 332 | who conveys the material (or modified versions of it) with contractual assumptions of 333 | liability to the recipient, for any liability that these contractual assumptions 334 | directly impose on those licensors and authors. 335 | 336 | All other non-permissive additional terms are considered “further 337 | restrictions” within the meaning of section 10. 
If the Program as you received 338 | it, or any part of it, contains a notice stating that it is governed by this License 339 | along with a term that is a further restriction, you may remove that term. If a 340 | license document contains a further restriction but permits relicensing or conveying 341 | under this License, you may add to a covered work material governed by the terms of 342 | that license document, provided that the further restriction does not survive such 343 | relicensing or conveying. 344 | 345 | If you add terms to a covered work in accord with this section, you must place, in 346 | the relevant source files, a statement of the additional terms that apply to those 347 | files, or a notice indicating where to find the applicable terms. 348 | 349 | Additional terms, permissive or non-permissive, may be stated in the form of a 350 | separately written license, or stated as exceptions; the above requirements apply 351 | either way. 352 | 353 | ### 8. Termination 354 | 355 | You may not propagate or modify a covered work except as expressly provided under 356 | this License. Any attempt otherwise to propagate or modify it is void, and will 357 | automatically terminate your rights under this License (including any patent licenses 358 | granted under the third paragraph of section 11). 359 | 360 | However, if you cease all violation of this License, then your license from a 361 | particular copyright holder is reinstated **(a)** provisionally, unless and until the 362 | copyright holder explicitly and finally terminates your license, and **(b)** permanently, 363 | if the copyright holder fails to notify you of the violation by some reasonable means 364 | prior to 60 days after the cessation. 365 | 366 | Moreover, your license from a particular copyright holder is reinstated permanently 367 | if the copyright holder notifies you of the violation by some reasonable means, this 368 | is the first time you have received notice of violation of this License (for any 369 | work) from that copyright holder, and you cure the violation prior to 30 days after 370 | your receipt of the notice. 371 | 372 | Termination of your rights under this section does not terminate the licenses of 373 | parties who have received copies or rights from you under this License. If your 374 | rights have been terminated and not permanently reinstated, you do not qualify to 375 | receive new licenses for the same material under section 10. 376 | 377 | ### 9. Acceptance Not Required for Having Copies 378 | 379 | You are not required to accept this License in order to receive or run a copy of the 380 | Program. Ancillary propagation of a covered work occurring solely as a consequence of 381 | using peer-to-peer transmission to receive a copy likewise does not require 382 | acceptance. However, nothing other than this License grants you permission to 383 | propagate or modify any covered work. These actions infringe copyright if you do not 384 | accept this License. Therefore, by modifying or propagating a covered work, you 385 | indicate your acceptance of this License to do so. 386 | 387 | ### 10. Automatic Licensing of Downstream Recipients 388 | 389 | Each time you convey a covered work, the recipient automatically receives a license 390 | from the original licensors, to run, modify and propagate that work, subject to this 391 | License. You are not responsible for enforcing compliance by third parties with this 392 | License. 
393 | 394 | An “entity transaction” is a transaction transferring control of an 395 | organization, or substantially all assets of one, or subdividing an organization, or 396 | merging organizations. If propagation of a covered work results from an entity 397 | transaction, each party to that transaction who receives a copy of the work also 398 | receives whatever licenses to the work the party's predecessor in interest had or 399 | could give under the previous paragraph, plus a right to possession of the 400 | Corresponding Source of the work from the predecessor in interest, if the predecessor 401 | has it or can get it with reasonable efforts. 402 | 403 | You may not impose any further restrictions on the exercise of the rights granted or 404 | affirmed under this License. For example, you may not impose a license fee, royalty, 405 | or other charge for exercise of rights granted under this License, and you may not 406 | initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging 407 | that any patent claim is infringed by making, using, selling, offering for sale, or 408 | importing the Program or any portion of it. 409 | 410 | ### 11. Patents 411 | 412 | A “contributor” is a copyright holder who authorizes use under this 413 | License of the Program or a work on which the Program is based. The work thus 414 | licensed is called the contributor's “contributor version”. 415 | 416 | A contributor's “essential patent claims” are all patent claims owned or 417 | controlled by the contributor, whether already acquired or hereafter acquired, that 418 | would be infringed by some manner, permitted by this License, of making, using, or 419 | selling its contributor version, but do not include claims that would be infringed 420 | only as a consequence of further modification of the contributor version. For 421 | purposes of this definition, “control” includes the right to grant patent 422 | sublicenses in a manner consistent with the requirements of this License. 423 | 424 | Each contributor grants you a non-exclusive, worldwide, royalty-free patent license 425 | under the contributor's essential patent claims, to make, use, sell, offer for sale, 426 | import and otherwise run, modify and propagate the contents of its contributor 427 | version. 428 | 429 | In the following three paragraphs, a “patent license” is any express 430 | agreement or commitment, however denominated, not to enforce a patent (such as an 431 | express permission to practice a patent or covenant not to sue for patent 432 | infringement). To “grant” such a patent license to a party means to make 433 | such an agreement or commitment not to enforce a patent against the party. 434 | 435 | If you convey a covered work, knowingly relying on a patent license, and the 436 | Corresponding Source of the work is not available for anyone to copy, free of charge 437 | and under the terms of this License, through a publicly available network server or 438 | other readily accessible means, then you must either **(1)** cause the Corresponding 439 | Source to be so available, or **(2)** arrange to deprive yourself of the benefit of the 440 | patent license for this particular work, or **(3)** arrange, in a manner consistent with 441 | the requirements of this License, to extend the patent license to downstream 442 | recipients. 
“Knowingly relying” means you have actual knowledge that, but 443 | for the patent license, your conveying the covered work in a country, or your 444 | recipient's use of the covered work in a country, would infringe one or more 445 | identifiable patents in that country that you have reason to believe are valid. 446 | 447 | If, pursuant to or in connection with a single transaction or arrangement, you 448 | convey, or propagate by procuring conveyance of, a covered work, and grant a patent 449 | license to some of the parties receiving the covered work authorizing them to use, 450 | propagate, modify or convey a specific copy of the covered work, then the patent 451 | license you grant is automatically extended to all recipients of the covered work and 452 | works based on it. 453 | 454 | A patent license is “discriminatory” if it does not include within the 455 | scope of its coverage, prohibits the exercise of, or is conditioned on the 456 | non-exercise of one or more of the rights that are specifically granted under this 457 | License. You may not convey a covered work if you are a party to an arrangement with 458 | a third party that is in the business of distributing software, under which you make 459 | payment to the third party based on the extent of your activity of conveying the 460 | work, and under which the third party grants, to any of the parties who would receive 461 | the covered work from you, a discriminatory patent license **(a)** in connection with 462 | copies of the covered work conveyed by you (or copies made from those copies), or **(b)** 463 | primarily for and in connection with specific products or compilations that contain 464 | the covered work, unless you entered into that arrangement, or that patent license 465 | was granted, prior to 28 March 2007. 466 | 467 | Nothing in this License shall be construed as excluding or limiting any implied 468 | license or other defenses to infringement that may otherwise be available to you 469 | under applicable patent law. 470 | 471 | ### 12. No Surrender of Others' Freedom 472 | 473 | If conditions are imposed on you (whether by court order, agreement or otherwise) 474 | that contradict the conditions of this License, they do not excuse you from the 475 | conditions of this License. If you cannot convey a covered work so as to satisfy 476 | simultaneously your obligations under this License and any other pertinent 477 | obligations, then as a consequence you may not convey it at all. For example, if you 478 | agree to terms that obligate you to collect a royalty for further conveying from 479 | those to whom you convey the Program, the only way you could satisfy both those terms 480 | and this License would be to refrain entirely from conveying the Program. 481 | 482 | ### 13. Use with the GNU Affero General Public License 483 | 484 | Notwithstanding any other provision of this License, you have permission to link or 485 | combine any covered work with a work licensed under version 3 of the GNU Affero 486 | General Public License into a single combined work, and to convey the resulting work. 487 | The terms of this License will continue to apply to the part which is the covered 488 | work, but the special requirements of the GNU Affero General Public License, section 489 | 13, concerning interaction through a network will apply to the combination as such. 490 | 491 | ### 14. 
Revised Versions of this License 492 | 493 | The Free Software Foundation may publish revised and/or new versions of the GNU 494 | General Public License from time to time. Such new versions will be similar in spirit 495 | to the present version, but may differ in detail to address new problems or concerns. 496 | 497 | Each version is given a distinguishing version number. If the Program specifies that 498 | a certain numbered version of the GNU General Public License “or any later 499 | version” applies to it, you have the option of following the terms and 500 | conditions either of that numbered version or of any later version published by the 501 | Free Software Foundation. If the Program does not specify a version number of the GNU 502 | General Public License, you may choose any version ever published by the Free 503 | Software Foundation. 504 | 505 | If the Program specifies that a proxy can decide which future versions of the GNU 506 | General Public License can be used, that proxy's public statement of acceptance of a 507 | version permanently authorizes you to choose that version for the Program. 508 | 509 | Later license versions may give you additional or different permissions. However, no 510 | additional obligations are imposed on any author or copyright holder as a result of 511 | your choosing to follow a later version. 512 | 513 | ### 15. Disclaimer of Warranty 514 | 515 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 516 | EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 517 | PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER 518 | EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 519 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE 520 | QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE 521 | DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 522 | 523 | ### 16. Limitation of Liability 524 | 525 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY 526 | COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS 527 | PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, 528 | INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE 529 | PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE 530 | OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE 531 | WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 532 | POSSIBILITY OF SUCH DAMAGES. 533 | 534 | ### 17. Interpretation of Sections 15 and 16 535 | 536 | If the disclaimer of warranty and limitation of liability provided above cannot be 537 | given local legal effect according to their terms, reviewing courts shall apply local 538 | law that most closely approximates an absolute waiver of all civil liability in 539 | connection with the Program, unless a warranty or assumption of liability accompanies 540 | a copy of the Program in return for a fee. 541 | 542 | _END OF TERMS AND CONDITIONS_ 543 | 544 | ## How to Apply These Terms to Your New Programs 545 | 546 | If you develop a new program, and you want it to be of the greatest possible use to 547 | the public, the best way to achieve this is to make it free software which everyone 548 | can redistribute and change under these terms. 
549 | 550 | To do so, attach the following notices to the program. It is safest to attach them 551 | to the start of each source file to most effectively state the exclusion of warranty; 552 | and each file should have at least the “copyright” line and a pointer to 553 | where the full notice is found. 554 | 555 | 556 | Copyright (C) 2019 Robin Lovelace 557 | 558 | This program is free software: you can redistribute it and/or modify 559 | it under the terms of the GNU General Public License as published by 560 | the Free Software Foundation, either version 3 of the License, or 561 | (at your option) any later version. 562 | 563 | This program is distributed in the hope that it will be useful, 564 | but WITHOUT ANY WARRANTY; without even the implied warranty of 565 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 566 | GNU General Public License for more details. 567 | 568 | You should have received a copy of the GNU General Public License 569 | along with this program. If not, see . 570 | 571 | Also add information on how to contact you by electronic and paper mail. 572 | 573 | If the program does terminal interaction, make it output a short notice like this 574 | when it starts in an interactive mode: 575 | 576 | zonebuilder Copyright (C) 2019 Robin Lovelace 577 | This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. 578 | This is free software, and you are welcome to redistribute it 579 | under certain conditions; type 'show c' for details. 580 | 581 | The hypothetical commands `show w` and `show c` should show the appropriate parts of 582 | the General Public License. Of course, your program's commands might be different; 583 | for a GUI interface, you would use an “about box”. 584 | 585 | You should also get your employer (if you work as a programmer) or school, if any, to 586 | sign a “copyright disclaimer” for the program, if necessary. For more 587 | information on this, and how to apply and follow the GNU GPL, see 588 | <>. 589 | 590 | The GNU General Public License does not permit incorporating your program into 591 | proprietary programs. If your program is a subroutine library, you may consider it 592 | more useful to permit linking proprietary applications with the library. If this is 593 | what you want to do, use the GNU Lesser General Public License instead of this 594 | License. But first, please read 595 | <>. 596 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(geo_select_aeq,sf) 4 | S3method(geo_select_aeq,sfc) 5 | export(london_a) 6 | export(london_c) 7 | export(zb_color) 8 | export(zb_doughnut) 9 | export(zb_lines) 10 | export(zb_plot) 11 | export(zb_quadrat) 12 | export(zb_segment) 13 | export(zb_zone) 14 | import(sf) 15 | importFrom(RColorBrewer,brewer.pal) 16 | importFrom(grDevices,hcl) 17 | importFrom(graphics,mtext) 18 | importFrom(graphics,par) 19 | importFrom(graphics,plot) 20 | importFrom(graphics,text) 21 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # zonebuilder 0.1.0 (2025-02) 2 | 3 | * Various issues fixed for CRAN (#36) 4 | * Removal of `zb_view()` function 5 | * Addition of citation to documentation 6 | 7 | # zonebuilder 0.0.2 8 | 9 | * Added a `NEWS.md` file to track changes to the package. 
10 | * Works with `sf` version 1.0-1 and above 11 | -------------------------------------------------------------------------------- /R/data.R: -------------------------------------------------------------------------------- 1 | #' Region representing London in projected coordinate system 2 | #' 3 | #' `london_a()` and `london_c()` return the city boundaries and centre 4 | #' point of London, respectively. 5 | #' 6 | #' @note `london_a()` returns a projected version of `lnd` in `spDataLarge`. 7 | #' See the `data-raw` folder in the package's repo to reproduce these datasets 8 | #' The `lonlat` versions of the data have coordinates in units of degrees. 9 | #' 10 | #' @docType data 11 | #' @keywords datasets 12 | #' @name london_area 13 | #' @aliases london_cent london_c london_a london_cent_lonlat london_area_lonlat 14 | #' @export 15 | #' @examples 16 | #' plot(london_a(), reset = FALSE) 17 | #' plot(london_c(), add = TRUE) 18 | london_a = function() { 19 | sf::st_set_crs(zonebuilder::london_area, 27700) 20 | } 21 | #' @rdname london_area 22 | #' @export 23 | london_c = function() { 24 | sf::st_set_crs(zonebuilder::london_cent, 27700) 25 | } 26 | 27 | #' The first 100 triangular numbers 28 | #' 29 | #' The first 100 in the sequence of [triangular numbers](https://en.wikipedia.org/wiki/Triangular_number) 30 | #' 31 | #' @note See the `data-raw` folder in the package's repo to reproduce these datasets 32 | #' 33 | #' @docType data 34 | #' @keywords datasets 35 | #' @name zb_100_triangular_numbers 36 | NULL 37 | -------------------------------------------------------------------------------- /R/doughut.R: -------------------------------------------------------------------------------- 1 | #' Make doughnuts 2 | #' 3 | #' @inheritParams zb_zone 4 | #' 5 | #' @return An `sf` data frame 6 | #' @export 7 | #' @examples 8 | #' zb_plot(zb_doughnut(london_c(), london_a())) 9 | zb_doughnut = function(x = NULL, 10 | area = NULL, 11 | n_circles = NA, 12 | distance = 1, 13 | distance_growth = 1) { 14 | zb_zone(x = x, area = area, n_circles = n_circles, distance = distance, distance_growth = distance_growth, n_segments = 1) 15 | 16 | } 17 | 18 | create_rings = function(point, n_circles, distance) { 19 | csdistance = cumsum(distance) 20 | circles = lapply(csdistance * 1000, function(d) { 21 | doughnut_i = sf::st_buffer(point, d) 22 | }) 23 | 24 | doughnuts_non_center = mapply(function(x, y) sf::st_sf(geometry = sf::st_difference(x, y)), 25 | circles[-1], 26 | circles[-n_circles], 27 | SIMPLIFY = FALSE) 28 | 29 | doughnuts = do.call(rbind, 30 | c(list(sf::st_sf(geometry = circles[[1]])), 31 | doughnuts_non_center)) 32 | 33 | doughnuts 34 | } 35 | -------------------------------------------------------------------------------- /R/from_stplanr.R: -------------------------------------------------------------------------------- 1 | #' Azimuthal Equidistant Projection 2 | #' 3 | #' @title Azimuthal Equidistant Projection 4 | #' @name geo_select_aeq 5 | #' @description Returns a CRS string for an Azimuthal Equidistant projection centered on the midpoint of an sf object's coordinates. 6 | #' 7 | #' @param shp An sf object. 8 | #' @return A CRS string for an Azimuthal Equidistant projection. 
9 | #' @export 10 | geo_select_aeq.sf = function (shp) { 11 | #cent <- sf::st_geometry(shp) 12 | coords <- sf::st_coordinates(shp) 13 | coords_mat <- matrix(coords[, 1:2], ncol = 2) 14 | midpoint <- apply(coords_mat, 2, mean) 15 | aeqd <- sprintf("+proj=aeqd +lat_0=%s +lon_0=%s +x_0=0 +y_0=0", 16 | midpoint[2], midpoint[1]) 17 | sf::st_crs(aeqd) 18 | } 19 | 20 | #' @rdname geo_select_aeq 21 | #' @export 22 | geo_select_aeq.sfc = function (shp) { 23 | #cent <- sf::st_geometry(shp) 24 | coords <- sf::st_coordinates(shp) 25 | coords_mat <- matrix(coords[, 1:2], ncol = 2) 26 | midpoint <- apply(coords_mat, 2, mean) 27 | aeqd <- sprintf("+proj=aeqd +lat_0=%s +lon_0=%s +x_0=0 +y_0=0", 28 | midpoint[2], midpoint[1]) 29 | sf::st_crs(aeqd) 30 | } 31 | 32 | #' @rdname geo_select_aeq 33 | geo_select_aeq = function (shp) { 34 | UseMethod("geo_select_aeq") 35 | } 36 | 37 | 38 | geo_project = function(shp) { 39 | crs = geo_select_aeq(shp) 40 | sf::st_transform(shp, crs = crs) 41 | } 42 | -------------------------------------------------------------------------------- /R/helper_functions.R: -------------------------------------------------------------------------------- 1 | get_angles = function(n_segments = 4, starting_angle = -45, angles_mid = FALSE) { 2 | a = seq(starting_angle, starting_angle + 360, length.out = n_segments + 1) 3 | if (angles_mid) a = a - (360 / n_segments) / 2 4 | a / 180 * pi 5 | } 6 | 7 | doughnut_areas = function(n_circles, distance) { 8 | csdistance = c(0, cumsum(distance)) 9 | sapply(2:(n_circles+1), function(i) { 10 | (pi * ((csdistance[i]) ^ 2)) - (pi * ((csdistance[i-1]) ^ 2)) 11 | }) 12 | } 13 | 14 | # n_circles = 10 15 | # x = london_area 16 | # point = london_area_midpoint 17 | find_distance_equal_dohnut = function(x, n_circles, point) { 18 | if(is.null(point)) point = sf::st_centroid(x) 19 | boundary_points = sf::st_cast(x, "POINT") 20 | distances_to_points = sf::st_distance(boundary_points, point) 21 | max_distance = as.numeric(max(distances_to_points)) / 1000 22 | # / cos(pi / 180 * 45) # add multiplier to account for hypotenuse issue 23 | max_distance / (n_circles) 24 | } 25 | 26 | # get_distances(1, 1, 10) 27 | # get_distances(2, 1, 10) 28 | # get_distances(1, 2, 10) 29 | # get_distances(.1, .1, 10) 30 | get_distances = function(distance, distance_growth, n_circles) { 31 | distance + (0:(n_circles-1)) * distance_growth 32 | } 33 | 34 | # x = london_area 35 | # number_of_circles(x, 1, 1, sf::st_centroid(x)) 36 | # number_of_circles(x, 0.1, 0.1, sf::st_centroid(x)) 37 | number_of_circles = function(area, distance, distance_growth, x) { 38 | boundary_points = suppressWarnings(sf::st_cast(area, "POINT")) 39 | distances_to_points = sf::st_distance(boundary_points, x) 40 | max_distance = as.numeric(max(distances_to_points)) / 1000 41 | csdistances = cumsum(get_distances(distance, distance_growth, 100)) 42 | 43 | which( 44 | zonebuilder::zb_100_triangular_numbers * distance > max_distance 45 | )[1] + 1 46 | # / cos(pi / 180 * 45) # add multiplier to account for hypotenuse issue 47 | } 48 | 49 | # distances = function(distance, distance_growth) { 50 | # 51 | # } 52 | 53 | number_of_segments = function(n_circles = 10, distance = rep(1, n_circles)) { 54 | areas = doughnut_areas(n_circles = n_circles, distance = distance) 55 | areas / areas[1] 56 | } 57 | -------------------------------------------------------------------------------- /R/labels.R: -------------------------------------------------------------------------------- 1 | zb_clock_labels = function(n_circles, 
segment_center = FALSE) { 2 | do.call(rbind, lapply(1:n_circles, function(i) { 3 | if (i==1L && !segment_center) { 4 | data.frame(circle_id = i, segment_id = 0, label = "A", stringsAsFactors = FALSE) 5 | } else { 6 | data.frame(circle_id = i, segment_id = 1:12, label = paste0(LETTERS[i], sprintf("%02d", 1:12)), stringsAsFactors = FALSE) 7 | } 8 | })) 9 | } 10 | 11 | 12 | # zb_quadrant_labels(5) 13 | zb_quadrant_labels = function(n_circles, n_segments = 12, segment_center = FALSE, quadrants = c("N", "E", "S", "W")) { 14 | 15 | # check n_segments 16 | if (any((n_segments %% 4) != 0 & n_segments != 1)) stop("n_segments should be equal to 1 or a multiple of 4") 17 | n_segments = rep(n_segments, length.out = n_circles) 18 | if (!segment_center) n_segments[1] = 1 19 | 20 | two_decimals_required = any(n_segments >= 40) 21 | 22 | do.call(rbind, mapply(function(i, j) { 23 | ring = LETTERS[i] 24 | quad = quadrants[ceiling(((1:j)/j) * 4)] 25 | seg = (((1:j - 1)/j) %% 0.25) * j + 1 26 | 27 | if (two_decimals_required) { 28 | seg = sprintf("%02d", seg) 29 | } 30 | 31 | labels = if (j == 1) { 32 | ring 33 | } else if (j == 4) { 34 | paste0(ring, quad) 35 | } else { 36 | paste0(ring, quad, seg) 37 | } 38 | 39 | if (j==1) { 40 | segment_id = 0 41 | } else { 42 | segment_id = 1:j 43 | } 44 | 45 | 46 | data.frame(circle_id = i, segment_id = segment_id, label = labels, stringsAsFactors = FALSE) 47 | }, 1:n_circles, n_segments, SIMPLIFY = FALSE)) 48 | } 49 | -------------------------------------------------------------------------------- /R/plot.R: -------------------------------------------------------------------------------- 1 | #' Generate colors for zones 2 | #' 3 | #' This function generates colors for zones. 4 | #' 5 | #' @param z An `sf` object containing zones covering the region 6 | #' @param palette Palette type, one of \code{"hcl"} (a palette based on the HCL color space), \code{"rings"} (a palette which colors the rings using the YlOrBr color brewer palette), \code{"dartboard"} (a palette which resembles a dartboard) 7 | #' @return A vector of colors 8 | #' @export 9 | #' @importFrom RColorBrewer brewer.pal 10 | #' 11 | #' @examples 12 | #' z = zb_zone(london_c(), london_a()) 13 | #' zb_color(z) 14 | #' plot(z[, "circle_id"], col = zb_color(z)) 15 | zb_color = function(z, palette = c("rings", "hcl", "dartboard")) { 16 | palette = match.arg(palette) 17 | 18 | if (palette == "hcl") { 19 | z$h = z$segment_id * 30 20 | z$l = pmin(10 + z$circle_id * 15, 100) 21 | z$c = 70 + ((100-z$l) / 80 * 30) 22 | z$c[z$segment_id == 0] = 0 23 | 24 | hcl(h = z$h, c = z$c, l = z$l) 25 | } else if (palette == "rings") { 26 | RColorBrewer::brewer.pal(9, "YlOrBr")[pmin(9,z$circle_id+1)] 27 | } else if (palette == "dartboard") { 28 | 29 | z$blackred = ((z$segment_id %% 2) == 0) 30 | z$blackwhite = ((z$circle_id %% 2) == 0) 31 | 32 | ifelse(z$blackred, ifelse(z$blackwhite, "#181818", "#C62627"), ifelse(z$blackwhite, "#EAD0AE", "#0BA158")) 33 | 34 | 35 | } 36 | } 37 | 38 | #' Plot zones 39 | #' 40 | #' This function opens a static map of the zones 41 | #' 42 | #' @param z An `sf` object containing zones covering the region 43 | #' @param palette Palette type, one of \code{"hcl"} (a palette based on the HCL color space), \code{"rings"} (a palette which colors the rings using the YlOrBr color brewer palette), \code{"dartboard"} (a palette which resembles a dartboard) 44 | #' @param title Plot title 45 | #' @param text_size Vector of two numeric values that determine the relative text sizes. 
The first determines the smallest text size and the second one the largest text size. The largest text size is used for the outermost circle, and the smallest for the central circle in case there are 9 or more circles. If there are less circles, the relative text size is larger (see source code for exact method) 46 | #' @param zone_label_thres This number determines in which zones labels are printed, namely each zone for which the relative area size is larger than `zone_label_thres`. 47 | #' @importFrom graphics par mtext 48 | #' @return A static plot created using R's base `graphics` package 49 | #' @export 50 | #' @examples 51 | #' zb_plot(zb_zone(london_c())) 52 | zb_plot = function(z, palette = c("rings", "hcl", "dartboard"), title = NULL, text_size = c(0.3, 1), zone_label_thres = 0.002) { 53 | palette = match.arg(palette) 54 | z$color = zb_color(z, palette) 55 | 56 | areas = as.numeric(sf::st_area(z)) 57 | areas = areas / sum(areas) 58 | 59 | sel = areas > zone_label_thres 60 | 61 | cent = sf::st_set_crs(sf::st_set_geometry(z, "centroid"), sf::st_crs(z)) 62 | 63 | oldpar = par(no.readonly = TRUE) # code line i 64 | on.exit(par(oldpar)) # code line i + 1 65 | p = graphics::par(mar=c(.2,.2,.2,.2)) 66 | plot(sf::st_geometry(z), col = z$color, border = "grey40") 67 | co = sf::st_coordinates(cent[sel,]) 68 | mx = max(z$circle_id[sel]) 69 | cex = seq(text_size[1], text_size[2], length.out = 9)[pmin(9, z$circle_id[sel] + (9-mx))] 70 | text(co[, 1], co[, 2], cex = cex, labels = z$label[sel]) 71 | 72 | if (!is.null(title)) graphics::mtext(title, 3, adj=0, line=-1) 73 | graphics::par(p) 74 | } 75 | -------------------------------------------------------------------------------- /R/quadrat.R: -------------------------------------------------------------------------------- 1 | #' Divide a region into quadrats 2 | #' 3 | #' @param x x 4 | #' @param ncol ncol 5 | #' @param nrow nrow 6 | #' @param intersection intersection 7 | #' 8 | #' @return An sf object 9 | #' @export 10 | #' 11 | #' @examples 12 | #' x = london_a() 13 | #' c = sf::st_centroid(london_a()) 14 | #' plot(zb_quadrat(x, ncol = 2), col = 2:5) 15 | #' plot(c, add = TRUE, col = "white") 16 | #' plot(zb_quadrat(x, ncol = 3)) 17 | #' plot(zb_quadrat(x, ncol = 4)) 18 | #' plot(zb_quadrat(x, ncol = 4, intersection = FALSE)) 19 | zb_quadrat = function(x, ncol, nrow = NULL, intersection = TRUE) { 20 | g = sf::st_make_grid(x = x, n = ncol) 21 | if(!intersection) { 22 | return(g) 23 | } 24 | sf::st_intersection(x, g) 25 | } -------------------------------------------------------------------------------- /R/segment.R: -------------------------------------------------------------------------------- 1 | #' Make segments 2 | #' 3 | #' @inheritParams zb_zone 4 | #' 5 | #' @return An `sf` data frame 6 | #' 7 | #' @export 8 | #' @examples 9 | #' zb_plot(zb_segment(london_c(), london_a())) 10 | zb_segment = function(x = NULL, 11 | area = NULL, 12 | n_segments = 12, 13 | distance = NA) { 14 | if (is.na(distance)) distance = ifelse(is.null(area), 15, 100) # 15 is the same as default ClockBoard with 5 rings, 100 is chosen to be large enough to cover arae 15 | zb_zone(x = x, area = area, n_circles = 1, distance = distance, n_segments = n_segments, segment_center = TRUE) 16 | 17 | } 18 | 19 | create_segments = function(x, n_segments = 4, starting_angle = -45, distance = 100000) { 20 | if (n_segments == 1) return(NULL) 21 | fr_matrix = matrix(sf::st_coordinates(x), ncol = 2) 22 | #angles_deg = seq(0, to = 360, by = 360 / n_segments) + starting_angle 23 | 
#angles_rad = angles_deg / 180 * pi 24 | 25 | angles_rad = get_angles(n_segments = n_segments, starting_angle = starting_angle) 26 | 27 | x_coord_to = distance * cos(angles_rad - 0.5 * pi) + fr_matrix[, 1] 28 | y_coord_to = distance * -sin(angles_rad - 0.5 * pi) + fr_matrix[, 2] 29 | to_matrix = cbind(x_coord_to, y_coord_to) 30 | to_matrix_next = to_matrix[c(2:nrow(to_matrix), 1), ] 31 | coord_matrix_list = lapply(1:n_segments, function(x) 32 | rbind(fr_matrix, to_matrix[x, ], to_matrix_next[x, ], fr_matrix)) 33 | poly_list = lapply(coord_matrix_list, function(x) sf::st_polygon(list(x))) 34 | sf::st_sfc(poly_list, crs = sf::st_crs(x)) 35 | } 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /R/utils.R: -------------------------------------------------------------------------------- 1 | utils::globalVariables(c("number_of_segments", "zb_100_triangular_numbers")) 2 | NULL -------------------------------------------------------------------------------- /R/zb_lines.R: -------------------------------------------------------------------------------- 1 | #' Create lines radiating at equal angles from a point 2 | #' 3 | #' @param point Center point 4 | #' @param n Number of lines 5 | #' @param starting_angle Starting angle 6 | #' @param distance Distance 7 | #' 8 | #' @return Objects of class `sfc` containing linestring geometries 9 | #' @export 10 | #' 11 | #' @examples 12 | #' point = sf::st_centroid(london_a()) 13 | #' n = 4 14 | #' l = zb_lines(point, n) 15 | #' plot(l) 16 | zb_lines = function(point, n, starting_angle = 45, distance = 100000) { 17 | fr_matrix = matrix(sf::st_coordinates(point), ncol = 2) 18 | angles_deg = seq(0, to = 360, by = 360 / n) + starting_angle 19 | angles_rad = angles_deg / 180 * pi 20 | x_coord_to = distance * cos(angles_rad) + fr_matrix[, 1] 21 | y_coord_to = distance * sin(angles_rad) + fr_matrix[, 2] 22 | to_matrix = cbind(x_coord_to, y_coord_to) 23 | line_matrix_list = lapply(1:n, function(x) rbind(fr_matrix, to_matrix[x, ])) 24 | sf::st_sfc(lapply(line_matrix_list, sf::st_linestring), crs = sf::st_crs(point)) 25 | } 26 | 27 | # test: break up our doughnut 28 | 29 | -------------------------------------------------------------------------------- /R/zone.R: -------------------------------------------------------------------------------- 1 | #' Generate zones covering a region of interest 2 | #' 3 | #' This function first divides geographic space into [annuli](https://en.wikipedia.org/wiki/Annulus_(mathematics)) 4 | #' (concentric 2d rings or 'doughnuts') and then subdivides each annulus 5 | #' into a number of segments. 6 | #' 7 | #' By default 12 segments are used for each annuli, resulting in a zoning system that can 8 | #' be used to refer to segments in [clock position](https://en.wikipedia.org/wiki/Clock_position), 9 | #' with 12 representing North, 3 representing East, 6 Sounth and 9 Western segments. 10 | #' 11 | #' @param x Centre point. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one point, or a name of a city (which is looked up with OSM geocoding). 12 | #' @param area (optional) Area. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one (multi) polygon 13 | #' @param n_circles Number of rings including the central circle. By default 5, unless \code{area} is specified (then it is set automatically to fill the area). 14 | #' @param n_segments (optional) Number of segments. The number of segments. 
Either one number which determines the number of segments applied to all circles, or a vector with a number for each circle (which should be a multiple of 4, see also the argument \code{labeling}). By default, the central circle is not segmented (see the argument \code{segment_center}). 15 | #' @param distance Distance The distances between the circles. For the center circle, it is the distance between the center and the circle. If only one number is specified, \code{distance_growth} determines the increment at which the distances grow for the outer circles. 16 | #' @param distance_growth The rate at which the distances between the circles grow. Only applicable when \code{distance} is one number and \code{n_circles > 1}. See also \code{distance}. 17 | #' @param labeling The labeling of the zones. Either \code{"clock"} which uses the clock ananolgy (i.e. hours 1 to 12) or \code{"NESW"} which uses the cardinal directions N, E, S, W. If the number of segments is 12, the clock labeling is used, and otherwise NESW. Note that the number of segments should be a multiple of four. If, for instance the number of segments is 8, than the segments are labeled N1, N2, E1, E2, S1, S2, W1, and W2. 18 | #' @param starting_angle The angle of the first of the radii that create the segments (degrees). By default, it is either 15 when \code{n_segments} is 12 (i.e. the ClockBoard setting) and -45 otherwise. 19 | #' @param segment_center Should the central circle be divided into segments? `FALSE` by default. 20 | #' @param intersection Should the zones be intersected with the area? \code{TRUE} by default. 21 | #' @param city (optional) Name of the city. If specified, it adds a column `city` to the returned `sf` object. 22 | #' 23 | #' @return An `sf` object containing zones covering the region 24 | #' @export 25 | #' @import sf 26 | #' @importFrom graphics plot text 27 | #' @importFrom grDevices hcl 28 | #' @examples 29 | #' # default settings 30 | #' z = zb_zone(london_c(), london_a()) 31 | #' zb_plot(zb_zone(london_c(), london_a(), n_circles = 2)) 32 | #' zb_plot(zb_zone(london_c(), london_a(), n_circles = 4, distance = 2, distance_growth = 0)) 33 | #' zb_plot(zb_zone(london_c(), london_a(), n_circles = 3, n_segments = c(1,4,8))) 34 | zb_zone = function(x = NULL, 35 | area = NULL, 36 | n_circles = NA, 37 | n_segments = 12, 38 | distance = 1, 39 | distance_growth = 1, 40 | labeling = NA, 41 | starting_angle = NA, 42 | segment_center = FALSE, 43 | intersection = TRUE, 44 | city = NULL) { 45 | 46 | # checks and preprosessing x and area 47 | if (is.null(x) && is.null(area)) stop("Please specify either x or area") 48 | 49 | if (!is.null(area)) { 50 | area = sf::st_geometry(area) 51 | if (!inherits(area, c("sfc_POLYGON", "sfc_MULTIPOLYGON"))) stop("area is not a (multi)polygon") 52 | if (!(length(area) == 1)) stop("area should contain only one (multi)polygon") 53 | if (is.na(sf::st_crs(area))) stop("crs of area is unkown") 54 | } 55 | 56 | if (is.null(x)) { 57 | x = sf::st_centroid(area) 58 | } else { 59 | if (!inherits(x, c("sf", "sfc"))) { 60 | if (is.character(x)) { 61 | if (!requireNamespace("tmaptools")) { 62 | stop("Please install tmaptools first") 63 | } else { 64 | x = tmaptools::geocode_OSM(x, as.sf = TRUE) 65 | } 66 | } else { 67 | stop("x should be an sf(c) object or a city name") 68 | } 69 | } 70 | 71 | x = sf::st_geometry(x) 72 | if (!inherits(x, "sfc_POINT")) stop("x is not a point") 73 | if (!(length(x) == 1)) stop("x should contain only one point") 74 | if (is.na(sf::st_crs(x))) stop("crs of x is 
unkown") 75 | if (!is.null(area) && !identical(sf::st_crs(area), sf::st_crs(x))) { 76 | area = sf::st_transform(area, sf::st_crs(x)) 77 | } 78 | } 79 | 80 | if (!is.null(area) && !sf::st_contains(area, x, sparse = FALSE)[1]) stop("x is not located in area") 81 | 82 | # other checks / preprosessing 83 | if (is.na(n_circles)) { 84 | if (!is.null(area)) { 85 | n_circles = number_of_circles(area, distance, distance_growth, x) 86 | } else { 87 | n_circles = 5 88 | } 89 | } 90 | 91 | if (n_circles == 1 && n_segments > 1 && !segment_center) { 92 | message("Please set segment_center = TRUE to divide the centre into multiple segments") 93 | } 94 | if (length(distance) != n_circles) { 95 | distance = get_distances(distance, distance_growth, n_circles) 96 | } 97 | if (is.na(labeling)) labeling = ifelse(all(n_segments == 12), "clock", "NESW") 98 | if (is.na(starting_angle)) starting_angle = ifelse(labeling == "clock", 15, -45) 99 | 100 | # project if needed (and reproject back at the end) 101 | orig_crs = sf::st_crs(x) 102 | if (sf::st_is_longlat(orig_crs)) { 103 | crs = geo_select_aeq(x) 104 | x = sf::st_transform(x, crs = crs) 105 | if (!is.null(area)) area = sf::st_transform(area, crs = crs) 106 | } 107 | 108 | # create doughnuts 109 | doughnuts = create_rings(x, n_circles, distance) 110 | 111 | # update n_circles 112 | n_circles = nrow(doughnuts) 113 | 114 | # clock_labels = (identical(n_segments, 12)) 115 | # if (is.na(starting_angle)) starting_angle = ifelse(clock_labels, 15, -45) 116 | 117 | # alternatives: add argument use_clock_labels? or another function with different params? 118 | 119 | n_segments = rep(n_segments, length.out = n_circles) 120 | if (!segment_center) n_segments[1] = 1 121 | 122 | # create segments 123 | segments = lapply(n_segments, 124 | create_segments, 125 | x = x, 126 | # starting_angle = ifelse(clock_labels, 15, -45)) 127 | starting_angle = starting_angle) 128 | 129 | # transform to sf and number them 130 | segments = lapply(segments, function(x) { 131 | if (is.null(x)) return(x) 132 | y = sf::st_as_sf(x) 133 | y$segment_id = 1:nrow(y) 134 | y 135 | }) 136 | 137 | # intersect doughnuts with x (the area polygon) 138 | if(!is.null(area) && intersection) { 139 | if (!all(sf::st_is_valid(area))) { 140 | if (!requireNamespace("lwgeom")) { 141 | stop("Combining polygons failed. 
Please install lwgeom and try again") 142 | } else { 143 | x = sf::st_make_valid(x) 144 | } 145 | } 146 | area = st_union(st_buffer(area, dist = 0.01)) #0.01 (in most crs's 1 cm) is arbitrary chosen, but works to resolve strange artefacts 147 | 148 | zones_ids = which(sapply(sf::st_intersects(doughnuts, area), length) > 0) 149 | doughnuts = suppressWarnings(sf::st_intersection(doughnuts, area)) 150 | segments = segments[zones_ids] 151 | } else { 152 | zones_ids = 1:n_circles 153 | } 154 | 155 | # intersect the result with segments 156 | doughnut_segments = do.call(rbind, mapply(function(i, x, y) { 157 | if (is.null(y)) { 158 | x$segment_id = 0 159 | x$circle_id = i 160 | x 161 | } else { 162 | if (i==1 && !segment_center) { 163 | res = x 164 | res$segment_id = 0 165 | res$circle_id = i 166 | } else { 167 | res = suppressWarnings(sf::st_intersection(x, y)) 168 | res$circle_id = i 169 | } 170 | res 171 | } 172 | }, zones_ids, split(doughnuts, 1:length(zones_ids)), segments, SIMPLIFY = FALSE)) 173 | 174 | # doughnut_segments$segment_id = formatC(doughnut_segments$segment_id, width = 2, flag = 0) 175 | # doughnut_segments$circle_id = formatC(doughnut_segments$circle_id, width = 2, flag = 0) 176 | 177 | # attach labels 178 | if (labeling == "clock") { 179 | labels_df = zb_clock_labels(n_circles, segment_center = segment_center) 180 | } else { 181 | labels_df = zb_quadrant_labels(n_circles, n_segments, segment_center) 182 | } 183 | 184 | df = merge(doughnut_segments, labels_df, by = c("circle_id", "segment_id")) 185 | df = df[c("label", "circle_id", "segment_id")] 186 | 187 | order_id = order(df$circle_id * 100 + df$segment_id) 188 | z = sf::st_transform(df[order_id, ], crs = orig_crs) 189 | if (!all(sf::st_is_valid(z))) { 190 | if (!requireNamespace("lwgeom")) { 191 | warning("sf object invalid. To fix it, install lwgeom, and rerun zb_zone") 192 | } else { 193 | z = sf::st_make_valid(z) 194 | z = suppressWarnings(st_cast(z, "MULTIPOLYGON")) # st_make_valid may return geometrycollections with empty points/lines 195 | } 196 | } 197 | 198 | z$centroid = sf::st_geometry(st_centroid_within_poly(z)) 199 | 200 | if (!is.null(city)) { 201 | z$city = city 202 | } 203 | 204 | z 205 | } 206 | 207 | 208 | st_centroid_within_poly <- function (poly) { 209 | 210 | # check if centroid is in polygon 211 | centroid <- suppressWarnings(sf::st_centroid(poly)) 212 | in_poly <- diag(sf::st_within(centroid, poly, sparse = F)) 213 | 214 | if (any(!in_poly)) { 215 | suppressWarnings({ 216 | centroid$geometry[!in_poly] <- st_point_on_surface(poly[!in_poly,])$geometry 217 | }) 218 | } 219 | 220 | return(centroid) 221 | } 222 | 223 | # Create zones of equal area (to be documented) 224 | # z = zb_zone(london_a(), n_circles = 8, distance_growth = 0, equal_area = TRUE) # bug with missing pies 225 | # suggestion: split out new new function, reduce n. 
arguments 226 | # plot(z, col = 1:nrow(z)) 227 | zb_zone_equal_area = function(x = NULL, 228 | point = NULL, 229 | n_circles = NULL, 230 | # n_segments = c(1, (1:(n_circles - 1)) * 4), # NA 231 | n_segments = NA, 232 | distance = 1, 233 | distance_growth = 1, 234 | intersection = TRUE) { 235 | # Functions to calculate distances 236 | n_segments = number_of_segments(n_circles = n_circles, distance = distance) 237 | zb_zone(x, point, n_circles, n_segments, distance, intersection = intersection) 238 | 239 | } 240 | 241 | -------------------------------------------------------------------------------- /README.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: github_document 3 | editor_options: 4 | chunk_output_type: console 5 | --- 6 | 7 | 8 | 9 | 10 | ```{r, include = FALSE} 11 | knitr::opts_chunk$set( 12 | collapse = TRUE, 13 | comment = "#>", 14 | fig.path = "man/figures/README-", 15 | out.width = "100%" 16 | ) 17 | ``` 18 | 19 | # zonebuilder 20 | 21 | 22 | 23 | [![R-CMD-check](https://github.com/zonebuilders/zonebuilder/workflows/R-CMD-check/badge.svg)](https://github.com/zonebuilders/zonebuilder/actions) 24 | [![CRAN status](https://www.r-pkg.org/badges/version/zonebuilder)](https://CRAN.R-project.org/package=zonebuilder) 25 | [![CRAN RStudio mirror 26 | downloads](https://cranlogs.r-pkg.org/badges/grand-total/zonebuilder)](https://www.r-pkg.org/pkg/zonebuilder) 27 | [DOI](https://doi.org/10.5311/JOSIS.2022.24.172) 28 | 29 | 30 | The goal of zonebuilder is to break up large geographic regions such as cities into manageable zones. 31 | Zoning systems are important in many fields, including demographics, economy, health, and transport. The zones have standard configuration, which enabled comparability across cities. See its website at [zonebuilders.github.io/zonebuilder](https://zonebuilders.github.io/zonebuilder/) and the academic paper that describes the package in detail [here (Lovelace et al. 2022)](https://doi.org/10.5311/JOSIS.2022.24.172). 32 | 33 | ## Installation 34 | 35 | You can install the released version of zonebuilder from [CRAN](https://CRAN.R-project.org) with: 36 | 37 | ```{r, eval=FALSE} 38 | install.packages("zonebuilder") 39 | ``` 40 | 41 | Install it from [GitHub](https://github.com/) with: 42 | 43 | ```{r install-gh, eval=FALSE} 44 | # install.packages("remotes") 45 | remotes::install_github("zonebuilders/zonebuilder") 46 | ``` 47 | 48 | ## Using zonebuilder 49 | 50 | Zonebuilder builds on the `sf` package and works well with mapping packages such as `ggplot2`, `leaflet`, `mapdeck`, `mapview` and `tmap`, the last of which we'll use in the following maps. 51 | Attaching the package provides the example datasets `london_a()` and `london_c()`, the geographic boundary and the centre of London: 52 | 53 | ```{r} 54 | library(zonebuilder) 55 | library(tmap) 56 | tmap_mode("plot") 57 | tm_shape(london_a()) + 58 | tm_borders() + 59 | tm_shape(london_c()) + 60 | tm_dots("red") 61 | ``` 62 | 63 | The main function `zb_zone` breaks this geographical scale into zones. The default settings follow the **ClockBoard** configuration: 64 | 65 | ```{r} 66 | london_zones <- zb_zone(london_c(), london_a()) 67 | zb_plot(london_zones) 68 | ``` 69 | 70 | The idea behind this zoning system is based on the following principles: 71 | 72 | * Most cities have a centre, the 'heart' of the city. Therefore, the zones are distributed around the centre. 
73 | * Typically, the population is much denser in and around the centre and also the traffic intensity is higher. Therefore, the zones are smaller in and around the centre. 74 | * The rings (so A, B, C, D, etc) reflect the proximity to the centre point. The distances from the outer borders of the rings A, B, C, D, etc. follow the triangular number sequence 1, 3, 6, 10, etc. This means that in everyday life use, within zone A everything is in walking distance, from ring B to the centre requires a bike, from zone C and further to the centre typically requires public transport. 75 | * Regarding direction relative to the centre, we use the clock analogy, since most people are familiar with that. So each ring (annuli) is divided into 12 segments, where segment 12 is directed at 12:00, segment 1 at 1:00 etc. 76 | 77 | 78 | The package `zonebuilder` does not only create zoning systems based on the CloadBoard layout as illustrated below. 79 | 80 | 81 | The function `zb_zone` makes use of `zb_doughnut` and `zb_segment`, which can also be used directly: 82 | 83 | ```{r fig.height=3} 84 | par(mfrow = c(1, 3)) 85 | zb_plot(zb_doughnut(london_c(), london_a(), n_circles = 5), title = "Doughnuts") 86 | zb_plot(zb_segment(london_c(), n_segments = 20), title = "Segments") 87 | zb_plot(zb_zone(london_c(), n_circles = 4, n_segments = 4), title = "4 segments, 4 circles") 88 | ``` 89 | 90 | The package also contains a function to create zones based on a simple rectangular grid system: 91 | 92 | ```{r} 93 | z = zb_quadrat(london_a(), ncol = 10) 94 | plot(z) 95 | ``` 96 | 97 | 98 | ## Contribute 99 | 100 | Contributions are welcome! 101 | 102 | It may be worth checking-in in a [discussion post](https://github.com/zonebuilders/zonebuilder/discussions/28) before opening an issue. 103 | 104 | ## Citation 105 | 106 | Please cite the package as follows (Lovelace et al. 2022): 107 | 108 | ``` 109 | @article{lovelace_clockboard_2022, 110 | title = {{{ClockBoard}}: {{A}} Zoning System for Urban Analysis}, 111 | shorttitle = {{{ClockBoard}}}, 112 | author = {Lovelace, Robin and Tennekes, Martijn and Carlino, Dustin}, 113 | date = {2022-06-20}, 114 | journaltitle = {Journal of Spatial Information Science}, 115 | number = {24}, 116 | pages = {63--85}, 117 | issn = {1948-660X}, 118 | doi = {10.5311/JOSIS.2022.24.172}, 119 | url = {https://josis.org/index.php/josis/article/view/172}, 120 | urldate = {2022-07-02}, 121 | abstract = {Zones are the building blocks of urban analysis. Fields ranging from demographics to transport planning routinely use zones - spatially contiguous areal units that break-up continuous space into discrete chunks - as the foundation for diverse analysis techniques. Key methods such as origin-destination analysis and choropleth mapping rely on zones with appropriate sizes, shapes and coverage. However, existing zoning systems are sub-optimal in many urban analysis contexts, for three main reasons: 1) administrative zoning systems are often based on somewhat arbitrary factors; 2) zoning systems that are evidence-based (e.g., based on equal population size) are often highly variable in size and shape, reducing their utility for inter-city comparison; and 3) official zoning systems in many places simply do not exist or are unavailable. We set out to develop a flexible, open and scalable solution to these problems. The result is the zonebuilder project (with R, Rust and Python implementations), which was used to create the ClockBoard zoning system. 
ClockBoard consists of 12 segments emanating from a central place and divided by concentric rings with radii that increase in line with the triangular number sequence (1, 3, 6 km etc). 'ClockBoards' thus create a consistent visual frame of reference for monocentric cities that is reminiscent of clocks and a dartboard. This paper outlines the design and potential uses of the ClockBoard zoning system in the historical context, and discusses future avenues for research into the design and assessment of zoning systems.}, 122 | issue = {24}, 123 | langid = {english}, 124 | keywords = {modifiable area unit problem}, 125 | file = {C:\Users\georl_admin\Zotero\storage\QRQDMJSH\Lovelace et al. - 2022 - ClockBoard A zoning system for urban analysis.pdf} 126 | } 127 | ``` -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # zonebuilder 5 | 6 | 7 | 8 | 9 | 10 | [![R-CMD-check](https://github.com/zonebuilders/zonebuilder/workflows/R-CMD-check/badge.svg)](https://github.com/zonebuilders/zonebuilder/actions) 11 | [![CRAN 12 | status](https://www.r-pkg.org/badges/version/zonebuilder)](https://CRAN.R-project.org/package=zonebuilder) 13 | [![CRAN RStudio mirror 14 | downloads](https://cranlogs.r-pkg.org/badges/grand-total/zonebuilder)](https://www.r-pkg.org/pkg/zonebuilder) 15 | [DOI](https://doi.org/10.5311/JOSIS.2022.24.172) 16 | 17 | The goal of zonebuilder is to break up large geographic regions such as 18 | cities into manageable zones. Zoning systems are important in many 19 | fields, including demographics, economy, health, and transport. The 20 | zones have standard configuration, which enabled comparability across 21 | cities. See its website at 22 | [zonebuilders.github.io/zonebuilder](https://zonebuilders.github.io/zonebuilder/) 23 | and the academic paper that describes the package in detail [here 24 | (Lovelace et al. 2022)](https://doi.org/10.5311/JOSIS.2022.24.172). 25 | 26 | ## Installation 27 | 28 | You can install the released version of zonebuilder from 29 | [CRAN](https://CRAN.R-project.org) with: 30 | 31 | ``` r 32 | install.packages("zonebuilder") 33 | ``` 34 | 35 | Install it from [GitHub](https://github.com/) with: 36 | 37 | ``` r 38 | # install.packages("remotes") 39 | remotes::install_github("zonebuilders/zonebuilder") 40 | ``` 41 | 42 | ## Using zonebuilder 43 | 44 | Zonebuilder builds on the `sf` package and works well with mapping 45 | packages such as `ggplot2`, `leaflet`, `mapdeck`, `mapview` and `tmap`, 46 | the last of which we’ll use in the following maps. Attaching the package 47 | provides the example datasets `london_a()` and `london_c()`, the 48 | geographic boundary and the centre of London: 49 | 50 | ``` r 51 | library(zonebuilder) 52 | library(tmap) 53 | tmap_mode("plot") 54 | tm_shape(london_a()) + 55 | tm_borders() + 56 | tm_shape(london_c()) + 57 | tm_dots("red") 58 | ``` 59 | 60 | 61 | 62 | The main function `zb_zone` breaks this geographical scale into zones. 63 | The default settings follow the **ClockBoard** configuration: 64 | 65 | ``` r 66 | london_zones <- zb_zone(london_c(), london_a()) 67 | zb_plot(london_zones) 68 | ``` 69 | 70 | 71 | 72 | The idea behind this zoning system is based on the following principles: 73 | 74 | - Most cities have a centre, the ‘heart’ of the city. Therefore, the 75 | zones are distributed around the centre. 
76 | - Typically, the population is much denser in and around the centre and 77 | also the traffic intensity is higher. Therefore, the zones are smaller 78 | in and around the centre. 79 | - The rings (so A, B, C, D, etc) reflect the proximity to the centre 80 | point. The distances from the outer borders of the rings A, B, C, D, 81 | etc. follow the triangular number sequence 1, 3, 6, 10, etc. This 82 | means that in everyday life use, within zone A everything is in 83 | walking distance, from ring B to the centre requires a bike, from zone 84 | C and further to the centre typically requires public transport. 85 | - Regarding direction relative to the centre, we use the clock analogy, 86 | since most people are familiar with that. So each ring (annuli) is 87 | divided into 12 segments, where segment 12 is directed at 12:00, 88 | segment 1 at 1:00 etc. 89 | 90 | The package `zonebuilder` does not only create zoning systems based on 91 | the CloadBoard layout as illustrated below. 92 | 93 | The function `zb_zone` makes use of `zb_doughnut` and `zb_segment`, 94 | which can also be used directly: 95 | 96 | ``` r 97 | par(mfrow = c(1, 3)) 98 | zb_plot(zb_doughnut(london_c(), london_a(), n_circles = 5), title = "Doughnuts") 99 | ``` 100 | 101 | 102 | 103 | ``` r 104 | zb_plot(zb_segment(london_c(), n_segments = 20), title = "Segments") 105 | ``` 106 | 107 | 108 | 109 | ``` r 110 | zb_plot(zb_zone(london_c(), n_circles = 4, n_segments = 4), title = "4 segments, 4 circles") 111 | ``` 112 | 113 | 114 | 115 | The package also contains a function to create zones based on a simple 116 | rectangular grid system: 117 | 118 | ``` r 119 | z = zb_quadrat(london_a(), ncol = 10) 120 | plot(z) 121 | ``` 122 | 123 | 124 | 125 | ## Contribute 126 | 127 | Contributions are welcome! 128 | 129 | It may be worth checking-in in a [discussion 130 | post](https://github.com/zonebuilders/zonebuilder/discussions/28) before 131 | opening an issue. 132 | 133 | ## Citation 134 | 135 | Please cite the package as follows (Lovelace et al. 2022): 136 | 137 | @article{lovelace_clockboard_2022, 138 | title = {{{ClockBoard}}: {{A}} Zoning System for Urban Analysis}, 139 | shorttitle = {{{ClockBoard}}}, 140 | author = {Lovelace, Robin and Tennekes, Martijn and Carlino, Dustin}, 141 | date = {2022-06-20}, 142 | journaltitle = {Journal of Spatial Information Science}, 143 | number = {24}, 144 | pages = {63--85}, 145 | issn = {1948-660X}, 146 | doi = {10.5311/JOSIS.2022.24.172}, 147 | url = {https://josis.org/index.php/josis/article/view/172}, 148 | urldate = {2022-07-02}, 149 | abstract = {Zones are the building blocks of urban analysis. Fields ranging from demographics to transport planning routinely use zones - spatially contiguous areal units that break-up continuous space into discrete chunks - as the foundation for diverse analysis techniques. Key methods such as origin-destination analysis and choropleth mapping rely on zones with appropriate sizes, shapes and coverage. However, existing zoning systems are sub-optimal in many urban analysis contexts, for three main reasons: 1) administrative zoning systems are often based on somewhat arbitrary factors; 2) zoning systems that are evidence-based (e.g., based on equal population size) are often highly variable in size and shape, reducing their utility for inter-city comparison; and 3) official zoning systems in many places simply do not exist or are unavailable. We set out to develop a flexible, open and scalable solution to these problems. 
The result is the zonebuilder project (with R, Rust and Python implementations), which was used to create the ClockBoard zoning system. ClockBoard consists of 12 segments emanating from a central place and divided by concentric rings with radii that increase in line with the triangular number sequence (1, 3, 6 km etc). 'ClockBoards' thus create a consistent visual frame of reference for monocentric cities that is reminiscent of clocks and a dartboard. This paper outlines the design and potential uses of the ClockBoard zoning system in the historical context, and discusses future avenues for research into the design and assessment of zoning systems.}, 150 | issue = {24}, 151 | langid = {english}, 152 | keywords = {modifiable area unit problem}, 153 | file = {C:\Users\georl_admin\Zotero\storage\QRQDMJSH\Lovelace et al. - 2022 - ClockBoard A zoning system for urban analysis.pdf} 154 | } 155 | -------------------------------------------------------------------------------- /_pkgdown.yml: -------------------------------------------------------------------------------- 1 | destination: docs 2 | -------------------------------------------------------------------------------- /cran-comments.md: -------------------------------------------------------------------------------- 1 | Many updates enabling tests to pass on CRAN, after the package was removed due to failing tests. 2 | 3 | I have made fixes (to DESCRIPTION and package size) to address the issues raised by CRAN. 4 | 5 | ## Test environments 6 | * local R installation, R 4.1.0 7 | * ubuntu 16.04 (on travis-ci), R 4.1.0 8 | * win-builder (devel) 9 | 10 | ## R CMD check results 11 | 12 | 0 errors | 0 warnings | 1 note 13 | 14 | * This is a new release. 15 | -------------------------------------------------------------------------------- /data-raw/London_examples.R: -------------------------------------------------------------------------------- 1 | # 2 | # devtools::load_all() 3 | # 4 | # data(london_area) 5 | # data(london_cent) 6 | # 7 | # sf::st_crs(zonebuilder::london_area) 8 | # 9 | # crs1 = sf::st_crs(zonebuilder::london_area) 10 | # sf::st_crs(london_area) = 4326 11 | # crs2 = sf::st_crs(london_area) 12 | # 13 | # identical(crs1, crs2) 14 | # waldo::compare(crs1, crs2) 15 | # usethis::use_data(london_area, overwrite = TRUE) 16 | # 17 | # sf::st_crs(london_cent) = 4326 18 | # usethis::use_data(london_cent, overwrite = TRUE) 19 | 20 | ##### London PM10 21 | 22 | tmpdir = "data-raw" 23 | tmpfile = file.path(tmpdir, "LAEI_2016_Emissions_Summary_GIS.zip") 24 | 25 | download.file("https://data.london.gov.uk/download/london-atmospheric-emissions-inventory--laei--2016/0210804b-4945-44ad-ab02-fa087dd4e504/LAEI_2016_Emissions_Summary_GIS.zip", destfile = tmpfile) 26 | 27 | unzip(tmpfile, exdir = tmpdir) 28 | 29 | library(sf) 30 | library(stars) 31 | library(dplyr) 32 | 33 | x1 = st_read("data-raw/2. GIS/SHP/Industrial and Comercial/IC_PM10.shp") 34 | x2 = st_read("data-raw/2. GIS/SHP/Domestic and Miscellaneous/DM_PM10.shp") 35 | x3 = st_read("data-raw/2. 
GIS/SHP/Other Transport/OtherT_PM10.shp") 36 | 37 | x1 = x1 %>% 38 | mutate(pm10 = ICCDDust16+ICCook16+ ICGasCom16+ICGasLk16+ICLandf16+ ICNRMMC16+ ICNRMMI16+ ICPart116+ ICPart2B16+ICSSWB16+ ICSLFC16+ ICSTW16+ ICWTS16) 39 | 40 | x2 = x2 %>% 41 | mutate(pm10 = MAcFires16 + MAgric16 + MForest16 + DGasComb16 + DHHGard16 + DFuelCom16 + DWBurn16) 42 | 43 | x3 = x3 %>% 44 | mutate(pm10 = OTAviat16 + OTCoShip16 + OTFrRail16 + OTPaShip16 + OTPaRail16 + OTSmPrV16) 45 | 46 | 47 | plot(x1[,"pm10"]) 48 | plot(x2[,"pm10"]) 49 | plot(x3[,"pm10"]) 50 | 51 | x = x1 %>% 52 | mutate(total_pm10 = pm10 + x2$pm10 + x3$pm10) %>% 53 | select(total_pm10) 54 | 55 | 56 | plot(x1[, "total_pm10"]) 57 | 58 | 59 | 60 | london = st_interpolate_aw(x, london_zones, extensive = FALSE) 61 | 62 | 63 | qtm(london, fill = "total_pm10") 64 | london_boroughs = st_interpolate_aw(x, sf::st_transform(spData::lnd, crs = sf::st_crs(x)), extensive = FALSE) 65 | qtm(london_boroughs, fill = "total_pm10") 66 | 67 | 68 | #### London OSM data (e.g. bus stops) 69 | 70 | library(tmap) 71 | # load_all("../tmaptools") 72 | # load_all("../tmap") 73 | 74 | library(sf) 75 | library(stars) 76 | library(tidyverse) 77 | 78 | library(geofabrik) 79 | View(geofabrik_zones) 80 | get_geofabrik("england", download_directory = "~/local/data") 81 | get_geofabrik("Greater London", download_directory = "~/local/data") 82 | 83 | bus_stops = get_geofabrik("Greater London", layer = "points", key = "highway", value = "bus_stop") %>% 84 | st_transform(27700) %>% 85 | mutate(x=1) %>% 86 | select(x) 87 | 88 | london = aggregate(bus_stops, london_zones, FUN = sum) 89 | 90 | 91 | qtm(london, fill = "x") 92 | 93 | 94 | tm_shape(london) + 95 | tm_polygons("x", convert2density = TRUE) 96 | 97 | 98 | ## same for Paris 99 | get_geofabrik("Ile-de-France", download_directory = "~/local/data") 100 | 101 | paris_bbox = geocode_OSM("Paris", as.sf = TRUE, geometry = "bbox") 102 | paris_cent = st_set_geometry(paris_bbox, "point") %>% st_set_crs(4326) 103 | 104 | 105 | paris_zones = zb_zone(paris_bbox, point = paris_cent, intersection = FALSE) 106 | 107 | paris_bus_stops = get_geofabrik("Ile-de-France", layer = "points", key = "highway", value = "bus_stop") %>% 108 | #st_transform(27700) %>% 109 | mutate(x=1) %>% 110 | select(x) 111 | 112 | paris = aggregate(paris_bus_stops, paris_zones, FUN = sum) 113 | 114 | tm_shape(paris) + 115 | tm_polygons("x", convert2density = TRUE) 116 | 117 | 118 | tmap_arrange(tm_shape(london) + tm_polygons("x", convert2density = TRUE), 119 | tm_shape(paris) + tm_polygons("x", convert2density = TRUE)) 120 | 121 | 122 | download.file("https://opendata.paris.fr/explore/dataset/arrondissements/download?format=geojson&timezone=Europe/Berlin&use_labels_for_header=true", destfile = "data-raw/france.json") 123 | 124 | 125 | paris_poly = st_read("data-raw/france.json") %>% 126 | st_transform(4326) 127 | # 128 | # ile_de_france = get_geofabrik("Ile-de-France", layer = "multipolygons", key = "waterway", value = "riverbank") 129 | # 130 | # 131 | # table(st_geometry_type(ile_de_france)) 132 | # 133 | # 134 | # paris_rivers = get_geofabrik("Ile-de-France", layer = "multipolygons", key = "waterway", value = "*", attributes = "waterway") %>% 135 | # #st_transform(27700) %>% 136 | # mutate(x=1) %>% 137 | # select(x) %>% 138 | # st_make_valid() 139 | # 140 | # paris_rivers = get_geofabrik("Ile-de-France", layer = "multipolygons", key = "natural", value = "water", attributes = "natural") %>% 141 | # #st_transform(27700) %>% 142 | # mutate(x=1) %>% 143 | # select(x) %>% 
144 | # st_make_valid() 145 | # 146 | # 147 | # paris_rivers = get_geofabrik("Ile-de-France", layer = "multipolygons", key = "natural", value = "water") %>% 148 | # mutate(x=1) %>% 149 | # select(x) %>% 150 | # st_make_valid() 151 | # 152 | # 153 | # library(osmdata) 154 | # 155 | # paris_rivers <- opq ("paris") %>% 156 | # add_osm_feature (key="waterway", value="river") %>% 157 | # osmdata_sf () 158 | # 159 | # 160 | # library(rnaturalearth) 161 | # 162 | # rivers = rnaturalearth::ne_download(scale = 10, type = "rivers_lake_centerlines", category = "physical", returnclass = "sf") %>% 163 | # st_transform(4326) 164 | # 165 | # paris_rivers = st_intersection(rivers, paris_poly) 166 | 167 | 168 | # DOWNLOADED from https://mapcruzin.com/download-shapefile/france-natural-shape.zip 169 | france_rivers = st_read("data-raw/natural.shp") %>% 170 | mutate(is_valid = st_is_valid(.)) %>% 171 | filter(is_valid, type == "riverbank") %>% 172 | st_combine() %>% 173 | st_make_valid() %>% 174 | st_set_crs(4326) 175 | 176 | paris = st_difference(paris_poly, france_rivers) 177 | 178 | 179 | paris_zones = zb_zone(x = paris, point = paris_cent) 180 | 181 | 182 | zb_view(paris_zones) 183 | -------------------------------------------------------------------------------- /data-raw/crashes.R: -------------------------------------------------------------------------------- 1 | remotes::install_github("ropensci/stats19") 2 | library(stats19) 3 | library(dplyr) 4 | library(tmap) 5 | library(tmaptools) 6 | library(sf) 7 | 8 | devtools::load_all() 9 | 10 | ################################################################################ 11 | #### STATS19 12 | ################################################################################ 13 | 14 | ## downloadsSTATS19 data 15 | years = 2010:2018 16 | crashes_all = get_stats19(years, "accidents", output_format = "sf") 17 | casualties_all = get_stats19(years, "casualties") 18 | crashes_joined = dplyr::inner_join(crashes_all, casualties_all) 19 | 20 | ## sf object of killed and seriously injured (ksi) cyclists 21 | ksi_cycl = crashes_joined %>% 22 | mutate(hour = as.numeric(substr(time, 1, 2)) + as.numeric(substr(time, 4, 5)) / 60) %>% 23 | filter(casualty_type == "Cyclist", 24 | !(day_of_week %in% c("Saturday", "Sunday")), 25 | (hour >= 7 & hour <= 9) | (hour >= 16.5 & hour <= 18.5), 26 | accident_severity %in% c("Fatal", "Serious")) %>% 27 | mutate(count=1) %>% # dummy variable needed for aggregate 28 | select(count) 29 | 30 | 31 | ################################################################################ 32 | #### ClockBoard zones of 8 UK cities 33 | ################################################################################ 34 | 35 | # Find the major UK cities using geocode OSM 36 | uk_cities = tmaptools::geocode_OSM(c("London", "Birmingham", "Manchester", "Leeds", "Liverpool", "Newcastle", "Sheffield", "Bristol"), as.sf = TRUE) %>% 37 | sf::st_transform(27700) 38 | 39 | # replace London coordinates by london_cent 40 | uk_cities$geometry[[1]] = zonebuilder::london_cent[[1]] 41 | 42 | # create zones of 5 circles 43 | uk_zones = lapply(1:nrow(uk_cities), function(i) { 44 | zb_zone(x = uk_cities[i,], n_circles = 5, city = uk_cities$query[i]) 45 | }) 46 | names(uk_zones) = uk_cities$query 47 | 48 | 49 | ################################################################################ 50 | #### ClockBoard zones of 8 UK cities 51 | ################################################################################ 52 | 53 | 54 | #### Process cycling distances 55 | 
regions = pct::pct_regions$region_name 56 | #> [1] "london" "greater-manchester" "liverpool-city-region" 57 | #> [4] "south-yorkshire" "north-east" "west-midlands" 58 | 59 | uk_cities$region = c("london", "west-midlands", "greater-manchester", "west-yorkshire", "liverpool-city-region", "north-east", "south-yorkshire", "avon") 60 | 61 | rnets = lapply(uk_cities$region, function(region) { 62 | pct::get_pct_rnet(region = region) %>% st_transform(27700) %>% 63 | mutate(segment_length_km = as.numeric(sf::st_length(.) / 1e3), 64 | km_cycled_per_working_day = segment_length_km * bicycle) %>% 65 | sf::st_centroid() 66 | }) 67 | 68 | 69 | 70 | ################################################################################ 71 | #### Aggregate ksi and cyling meters to zones 72 | ################################################################################ 73 | 74 | 75 | # Aggregate ksi to zones 76 | ksi_cycl_per_zone = do.call(rbind, lapply(uk_zones, function(z) { 77 | aggregate(ksi_cycl, z, FUN = sum) %>% 78 | mutate(city = z$city, 79 | circle_id = z$circle_id, 80 | segment_id = z$segment_id) %>% 81 | tidyr::replace_na(list(count = 0)) 82 | })) 83 | 84 | # Aggregate cycling 85 | km_per_zone = do.call(rbind, mapply(FUN = function(rnet, z) { 86 | aggregate(rnet["km_cycled_per_working_day"], z, FUN = sum) %>% 87 | mutate(city = z$city, 88 | circle_id = z$circle_id, 89 | segment_id = z$segment_id) 90 | }, rnets, uk_zones, SIMPLIFY = FALSE)) 91 | 92 | # Join and filter 93 | df = ksi_cycl_per_zone %>% 94 | left_join(km_per_zone %>% st_drop_geometry(), by = c("city", "circle_id", "segment_id")) %>% 95 | mutate(ksi_yr = count / length(years), 96 | bkm_yr = (km_cycled_per_working_day / 1e9) * (2*200), 97 | ksi_bkm = ksi_yr / bkm_yr) 98 | 99 | saveRDS(df, file = "data-raw/ksi_bkm_zone.rds") 100 | 101 | ################################################################################ 102 | #### Aux data (needed for plot) 103 | ################################################################################ 104 | 105 | remotes::install_github("ropensci/rnaturalearthhires") 106 | uk = rnaturalearth::ne_countries(scale = 10, country = "United Kingdom", returnclass = "sf") 107 | thames = rnaturalearth::ne_download(scale = 10, type = "rivers_lake_centerlines", returnclass = "sf", category = "physical") %>% 108 | filter(name == "Thames") 109 | 110 | saveRDS(uk, file = "data-raw/uk.rds") 111 | saveRDS(thames, file = "data-raw/thames.rds") 112 | -------------------------------------------------------------------------------- /data-raw/create_NLD_cities.R: -------------------------------------------------------------------------------- 1 | library(sf) 2 | library(tmap) 3 | library(tmaptools) 4 | library(dplyr) 5 | 6 | data("NLD_muni") 7 | 8 | library(geofabrik) 9 | get_geofabrik("netherlands", download_directory = "~/local/data") 10 | cities = get_geofabrik("netherlands", layer = "points", key = "place", value = "city") 11 | 12 | 13 | 14 | # ALTERNATIVE 15 | # 16 | # library(osmdata) 17 | # cities = opq ("netherlands") %>% 18 | # add_osm_feature(key = "place", value = "city") %>% 19 | # osmdata_sf() 20 | 21 | 22 | NLD_cities = cities %>% 23 | rename(geometry = '_ogr_geometry_') %>% 24 | select(name) %>% 25 | mutate(name = replace(name, name == "Den Haag", "'s-Gravenhage")) %>% 26 | left_join(NLD_muni %>% st_drop_geometry() %>% select(population, name) %>% mutate(name = as.character(name)), by = c("name" = "name")) 27 | 28 | 29 | saveRDS(NLD_cities, file = "NLD_cities.rds") 30 | 31 | 
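Following the UK-cities pattern in `data-raw/crashes.R`, a hedged sketch of how the `NLD_cities` points created above could each be given their own ClockBoard zones (the RD New CRS 28992 and the `NLD_cities_proj`/`nld_zones` names are illustrative choices, not taken from this script):

```r
library(sf)
library(zonebuilder)

# project the city points to a metric CRS for the Netherlands (RD New)
NLD_cities_proj = st_transform(NLD_cities, 28992)

# one 5-ring ClockBoard per city, tagged with the city name
nld_zones = lapply(seq_len(nrow(NLD_cities_proj)), function(i) {
  zb_zone(x = NLD_cities_proj[i, ], n_circles = 5, city = NLD_cities_proj$name[i])
})
names(nld_zones) = NLD_cities_proj$name

# e.g. inspect one city
zb_plot(nld_zones[[1]], title = names(nld_zones)[1])
```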
-------------------------------------------------------------------------------- /data-raw/fix-polygons.R: -------------------------------------------------------------------------------- 1 | # Error in build process: 2 | # https://github.com/zonebuilders/zonebuilder/runs/3009599916#step:9:233 3 | 4 | remotes::install_github("mtennekes/tmap") 5 | 6 | library(zonebuilder) 7 | library(tmap) 8 | library(sf) 9 | z = zb_zone(london_c(), london_a()) 10 | summary(sf::st_is_valid(z)) 11 | plot(z) 12 | mapview::mapview(z) # works 13 | qtm(z) # works 14 | tmap_mode("view") 15 | qtm(z) 16 | qtm(sf::st_make_valid(z)) 17 | tmap_options(check.and.fix = TRUE) 18 | qtm(z) 19 | 20 | lnd_a = zonebuilder::london_a() 21 | sf::st_is_valid(lnd_a) 22 | sf::st_is_valid(london_c()) 23 | 24 | 25 | library(zonebuilder) 26 | library(tmap) 27 | library(sf) 28 | z = zb_zone(london_c(), london_a()) 29 | summary(sf::st_is_valid(z)) 30 | # plot(z) 31 | # mapview::mapview(z) # works 32 | # qtm(z) # works 33 | tmap_mode("view")e 34 | sf::sf_use_s2(use_s2 = FALSE) 35 | qtm(z) 36 | 37 | current_s2 = sf::sf_use_s2() 38 | if(current_s2) { 39 | message("Temporarily setting sf::sf_use_s2(FALSE)") 40 | sf::sf_use_s2(FALSE) 41 | # run the operation 42 | sf::sf_use_s2(TRUE) 43 | } 44 | 45 | 46 | london_a() 47 | 48 | london_area_lonlat = sf::st_transform(sf::st_set_crs(london_area, 27700), 4326) 49 | london_cent_lonlat = sf::st_transform(sf::st_set_crs(london_cent, 27700), 4326) 50 | usethis::use_data(london_area_lonlat) 51 | usethis::use_data(london_cent_lonlat) 52 | 53 | # Update the data docs... 54 | file.edit("R/data.R") 55 | -------------------------------------------------------------------------------- /data-raw/london-figures.R: -------------------------------------------------------------------------------- 1 | # Aim: generate figures comparing ClockBoard zoning system with borough zones 2 | 3 | ##### London PM10 4 | 5 | tmpdir = "data-raw" 6 | tmpfile = file.path(tmpdir, "LAEI_2016_Emissions_Summary_GIS.zip") 7 | 8 | download.file("https://data.london.gov.uk/download/london-atmospheric-emissions-inventory--laei--2016/0210804b-4945-44ad-ab02-fa087dd4e504/LAEI_2016_Emissions_Summary_GIS.zip", destfile = tmpfile) 9 | 10 | unzip(tmpfile, exdir = tmpdir) 11 | 12 | library(sf) 13 | library(stars) 14 | library(dplyr) 15 | library(tmap) 16 | library(zonebuilder) 17 | 18 | x1 = st_read("data-raw/2. GIS/SHP/Industrial and Comercial/IC_PM10.shp") 19 | x2 = st_read("data-raw/2. GIS/SHP/Domestic and Miscellaneous/DM_PM10.shp") 20 | x3 = st_read("data-raw/2. 
GIS/SHP/Other Transport/OtherT_PM10.shp") 21 | 22 | x1 = x1 %>% 23 | mutate(pm10 = ICCDDust16+ICCook16+ ICGasCom16+ICGasLk16+ICLandf16+ ICNRMMC16+ ICNRMMI16+ ICPart116+ ICPart2B16+ICSSWB16+ ICSLFC16+ ICSTW16+ ICWTS16) 24 | 25 | x2 = x2 %>% 26 | mutate(pm10 = MAcFires16 + MAgric16 + MForest16 + DGasComb16 + DHHGard16 + DFuelCom16 + DWBurn16) 27 | 28 | x3 = x3 %>% 29 | mutate(pm10 = OTAviat16 + OTCoShip16 + OTFrRail16 + OTPaShip16 + OTPaRail16 + OTSmPrV16) 30 | 31 | 32 | plot(x1[,"pm10"]) 33 | plot(x2[,"pm10"]) 34 | plot(x3[,"pm10"]) 35 | 36 | x = x1 %>% 37 | mutate(total_pm10 = pm10 + x2$pm10 + x3$pm10) %>% 38 | select(total_pm10) 39 | 40 | london_zones_zb = zb_zone(london_c(), n_circles = 8) 41 | london_zones = zb_zone(london_c(), n_circles = 8, area = london_a()) 42 | london = st_interpolate_aw(x, london_zones, extensive = FALSE) 43 | london_zb = st_interpolate_aw(x, london_zones_zb, extensive = FALSE) 44 | 45 | london_boroughs = st_interpolate_aw(x, sf::st_transform(spData::lnd, crs = sf::st_crs(x)), extensive = FALSE) 46 | brks_pm10 = c(0, 1, 2, 4, 8, 16) 47 | m0 = tm_shape(x) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + 48 | tm_layout(title = "A", frame = FALSE) 49 | m0l = tm_shape(x) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", title = expression("PM10 (ug/" * m^3 * ")")) + tm_borders(col = "white", lwd = 0.2) + tm_layout(legend.only = TRUE) 50 | m1 = tm_shape(london_boroughs) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + tm_borders(col = "white", lwd = 0.2) + 51 | tm_layout(title = "B", frame = FALSE) 52 | m2 = tm_shape(london_zb) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + tm_borders(col = "white", lwd = 0.2) + 53 | tm_layout(title = "C", frame = FALSE) 54 | m3 = tm_shape(london) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + tm_borders(col = "white", lwd = 0.2) + 55 | tm_layout(title = "D", frame = FALSE) 56 | tm1 = tmap_arrange(m0l, m0, m1, m2, m3, nrow = 1) 57 | tm1 58 | tmap_save(tm1, filename = "cityscale.png", width = 7, height = 2) 59 | magick::image_read("cityscale.png") 60 | piggyback::pb_upload("cityscale.png") 61 | piggyback::pb_download_url("cityscale.png") 62 | # [1] "https://github.com/zonebuilders/zonebuilder/releases/download/v0.0.2.9000/cityscale.png" 63 | 64 | saveRDS(tm1, "tm1.Rds") 65 | piggyback::pb_upload("tm1.Rds") 66 | piggyback::pb_download_url("tm1.Rds") 67 | 68 | # Todo: create a grid to show how the concept works for different datasets, e.g. 
road casualties 69 | lnd_border = sf::st_union(zonebuilder::london_a()) 70 | n0 = tm_shape(lnd_border) + tm_borders() + tm_graticules() + tm_layout(title = "Raw data/grid") 71 | # generate borough names 72 | london_boroughs 73 | n1 = tm_shape(london_boroughs) + tm_borders() + tm_layout(title = "Raw data/grid") 74 | n0 = tm_shape(lnd_border) + tm_borders() + tm_graticules() + tm_layout(title = "Raw data/grid") 75 | n0 = tm_shape(lnd_border) + tm_borders() + tm_graticules() + tm_layout(title = "Raw data/grid") 76 | 77 | # Bike crashes data: 78 | file.edit("data-raw/crashes.R") 79 | 80 | library(stats19) 81 | 82 | ## downloadsSTATS19 data 83 | years = 2010:2018 84 | crashes_all = get_stats19(years, "accidents", output_format = "sf") 85 | casualties_all = get_stats19(years, "casualties") 86 | crashes_joined = dplyr::inner_join(crashes_all, casualties_all) 87 | 88 | ## sf object of killed and seriously injured (ksi) cyclists 89 | ksi_cycl = crashes_joined %>% 90 | mutate(hour = as.numeric(substr(time, 1, 2)) + as.numeric(substr(time, 4, 5)) / 60) %>% 91 | filter(casualty_type == "Cyclist", 92 | !(day_of_week %in% c("Saturday", "Sunday")), 93 | (hour >= 7 & hour <= 9) | (hour >= 16.5 & hour <= 18.5), 94 | accident_severity %in% c("Fatal", "Serious")) %>% 95 | mutate(count=1) %>% # dummy variable needed for aggregate 96 | select(count) 97 | 98 | nrow(ksi_cycl) 99 | ksi_cycl_wgs = sf::st_transform(ksi_cycl, 4326) 100 | 101 | ksi_boroughs = aggregate(ksi_cycl, london_boroughs, FUN = sum) 102 | ksi_zb = aggregate(ksi_cycl, london_zb, FUN = sum) 103 | ksi_zb_lnd = aggregate(ksi_cycl, london, FUN = sum) 104 | 105 | m0 = tm_shape(x) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + 106 | tm_layout(title = "A", frame = FALSE) 107 | m0l = tm_shape(x) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", title = "Average PM10\nμg/m^3") + tm_borders(col = "white", lwd = 0.2) + tm_layout(legend.only = TRUE) 108 | 109 | m1 = tm_shape(london_boroughs) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + tm_borders(col = "white", lwd = 0.2) + 110 | tm_layout(title = "B", frame = FALSE) 111 | m2 = tm_shape(london_zb) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + tm_borders(col = "white", lwd = 0.2) + 112 | tm_layout(title = "C", frame = FALSE) 113 | m3 = tm_shape(london) + tm_fill("total_pm10", breaks = brks_pm10, palette = "viridis", legend.show = FALSE) + tm_borders(col = "white", lwd = 0.2) + 114 | tm_layout(title = "D", frame = FALSE) 115 | tm1 = tmap_arrange(m0l, m0, m1, m2, m3, nrow = 1) 116 | tm1 117 | 118 | 119 | #### London OSM data (e.g. bus stops) - todo... 
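A hedged sketch of how that todo could be completed, reusing the bus-stop layer built in `data-raw/London_examples.R` (`bus_stops` is assumed to be that EPSG:27700 point layer with its dummy count column `x`; it is not created in this script):

```r
# count OSM bus stops per ClockBoard zone and per borough, then compare maps
stops_zb = aggregate(bus_stops["x"], london_zones, FUN = sum)
stops_boroughs = aggregate(bus_stops["x"], london_boroughs, FUN = sum)

tmap_arrange(
  tm_shape(stops_zb) + tm_polygons("x", convert2density = TRUE),
  tm_shape(stops_boroughs) + tm_polygons("x", convert2density = TRUE)
)
```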
120 | 121 | -------------------------------------------------------------------------------- /data-raw/london.R: -------------------------------------------------------------------------------- 1 | ## code to prepare the datasets used in this package 2 | 3 | library(sf) 4 | library(tidyverse) 5 | 6 | london_area = spData::lnd %>% 7 | st_transform(27700) %>% 8 | st_union() 9 | 10 | # identical(london_area, london_area2) 11 | sf::st_crs(london_area) = NA 12 | 13 | usethis::use_data(london_area, overwrite = TRUE) 14 | 15 | # fix issue with ASCII strings 16 | london_cent = tmaptools::geocode_OSM("london uk", as.sf = TRUE) %>% 17 | st_transform(27700) %>% 18 | st_geometry() 19 | 20 | london_area_centroid = st_centroid(london_area) 21 | plot(london_area) 22 | plot(london_cent, add = TRUE) 23 | plot(london_area_centroid, add = TRUE, col = "red") 24 | sf::st_crs(london_cent) = NA 25 | 26 | usethis::use_data(london_cent, overwrite = TRUE) 27 | 28 | 29 | # Triangular number sequence ---------------------------------------------- 30 | 31 | n = 100 32 | zb_100_triangular_numbers = cumsum(1:100) 33 | usethis::use_data(zb_100_triangular_numbers) 34 | -------------------------------------------------------------------------------- /data/london_area.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/data/london_area.rda -------------------------------------------------------------------------------- /data/london_area_lonlat.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/data/london_area_lonlat.rda -------------------------------------------------------------------------------- /data/london_cent.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/data/london_cent.rda -------------------------------------------------------------------------------- /data/london_cent_lonlat.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/data/london_cent_lonlat.rda -------------------------------------------------------------------------------- /data/zb_100_triangular_numbers.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/data/zb_100_triangular_numbers.rda -------------------------------------------------------------------------------- /inst/test-cities.R: -------------------------------------------------------------------------------- 1 | library(tidyverse) 2 | library(sf) 3 | 4 | cities = rnaturalearth::ne_download("large", type = "populated_places", returnclass = "sf") 5 | bristol_midpoint = cities %>% filter(NAME == "Bristol") %>% 6 | filter(POP_MAX == max(POP_MAX)) 7 | mapview::mapview(bristol_midpoint) 8 | bristol_midpoint_aeq = bristol_midpoint %>% 9 | st_transform(stplanr::geo_select_aeq(.)) 10 | mapview::mapview(bristol_midpoint_aeq) 11 | z = zb_zone(point = bristol_midpoint_aeq, n_circles = 20) 12 | library(tmap) 13 | tmap_mode("view") 14 | qtm(z) 15 | -------------------------------------------------------------------------------- 
/man/figures/README-unnamed-chunk-2-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-2-1.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-3-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-3-1.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-4-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-4-1.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-4-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-4-2.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-5-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-5-1.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-5-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-5-2.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-5-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-5-3.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-6-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-6-1.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-7-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-7-1.png -------------------------------------------------------------------------------- /man/figures/README-unnamed-chunk-8-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zonebuilders/zonebuilder/cd87a182e6f604608520f1f3d17dd551f281c966/man/figures/README-unnamed-chunk-8-1.png -------------------------------------------------------------------------------- /man/geo_select_aeq.Rd: 
-------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/from_stplanr.R 3 | \name{geo_select_aeq} 4 | \alias{geo_select_aeq} 5 | \alias{geo_select_aeq.sf} 6 | \alias{geo_select_aeq.sfc} 7 | \title{Azimuthal Equidistant Projection} 8 | \usage{ 9 | \method{geo_select_aeq}{sf}(shp) 10 | 11 | \method{geo_select_aeq}{sfc}(shp) 12 | 13 | geo_select_aeq(shp) 14 | } 15 | \arguments{ 16 | \item{shp}{An sf object.} 17 | } 18 | \value{ 19 | A CRS string for an Azimuthal Equidistant projection. 20 | } 21 | \description{ 22 | Returns a CRS string for an Azimuthal Equidistant projection centered on the midpoint of an sf object's coordinates. 23 | } 24 | \details{ 25 | Azimuthal Equidistant Projection 26 | } 27 | -------------------------------------------------------------------------------- /man/london_area.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{london_area} 5 | \alias{london_area} 6 | \alias{london_a} 7 | \alias{london_cent} 8 | \alias{london_c} 9 | \alias{london_cent_lonlat} 10 | \alias{london_area_lonlat} 11 | \title{Region representing London in projected coordinate system} 12 | \usage{ 13 | london_a() 14 | 15 | london_c() 16 | } 17 | \description{ 18 | `london_a()` and `london_c()` return the city boundaries and centre 19 | point of London, respectively. 20 | } 21 | \note{ 22 | `london_a()` returns a projected version of `lnd` in `spDataLarge`. 23 | See the `data-raw` folder in the package's repo to reproduce these datasets 24 | The `lonlat` versions of the data have coordinates in units of degrees. 25 | } 26 | \examples{ 27 | plot(london_a(), reset = FALSE) 28 | plot(london_c(), add = TRUE) 29 | } 30 | \keyword{datasets} 31 | -------------------------------------------------------------------------------- /man/zb_100_triangular_numbers.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{zb_100_triangular_numbers} 5 | \alias{zb_100_triangular_numbers} 6 | \title{The first 100 triangular numbers} 7 | \description{ 8 | The first 100 in the sequence of [triangular numbers](https://en.wikipedia.org/wiki/Triangular_number) 9 | } 10 | \note{ 11 | See the `data-raw` folder in the package's repo to reproduce these datasets 12 | } 13 | \keyword{datasets} 14 | -------------------------------------------------------------------------------- /man/zb_color.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/plot.R 3 | \name{zb_color} 4 | \alias{zb_color} 5 | \title{Generate colors for zones} 6 | \usage{ 7 | zb_color(z, palette = c("rings", "hcl", "dartboard")) 8 | } 9 | \arguments{ 10 | \item{z}{An `sf` object containing zones covering the region} 11 | 12 | \item{palette}{Palette type, one of \code{"hcl"} (a palette based on the HCL color space), \code{"rings"} (a palette which colors the rings using the YlOrBr color brewer palette), \code{"dartboard"} (a palette which resembles a dartboard)} 13 | } 14 | \value{ 15 | A vector of colors 16 | } 17 | \description{ 18 | This function generates colors for zones. 
19 | } 20 | \examples{ 21 | z = zb_zone(london_c(), london_a()) 22 | zb_color(z) 23 | plot(z[, "circle_id"], col = zb_color(z)) 24 | } 25 | -------------------------------------------------------------------------------- /man/zb_doughnut.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/doughut.R 3 | \name{zb_doughnut} 4 | \alias{zb_doughnut} 5 | \title{Make doughnuts} 6 | \usage{ 7 | zb_doughnut( 8 | x = NULL, 9 | area = NULL, 10 | n_circles = NA, 11 | distance = 1, 12 | distance_growth = 1 13 | ) 14 | } 15 | \arguments{ 16 | \item{x}{Centre point. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one point, or a name of a city (which is looked up with OSM geocoding).} 17 | 18 | \item{area}{(optional) Area. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one (multi) polygon} 19 | 20 | \item{n_circles}{Number of rings including the central circle. By default 5, unless \code{area} is specified (then it is set automatically to fill the area).} 21 | 22 | \item{distance}{Distance The distances between the circles. For the center circle, it is the distance between the center and the circle. If only one number is specified, \code{distance_growth} determines the increment at which the distances grow for the outer circles.} 23 | 24 | \item{distance_growth}{The rate at which the distances between the circles grow. Only applicable when \code{distance} is one number and \code{n_circles > 1}. See also \code{distance}.} 25 | } 26 | \value{ 27 | An `sf` data frame 28 | } 29 | \description{ 30 | Make doughnuts 31 | } 32 | \examples{ 33 | zb_plot(zb_doughnut(london_c(), london_a())) 34 | } 35 | -------------------------------------------------------------------------------- /man/zb_lines.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/zb_lines.R 3 | \name{zb_lines} 4 | \alias{zb_lines} 5 | \title{Create lines radiating at equal angles from a point} 6 | \usage{ 7 | zb_lines(point, n, starting_angle = 45, distance = 1e+05) 8 | } 9 | \arguments{ 10 | \item{point}{Center point} 11 | 12 | \item{n}{Number of lines} 13 | 14 | \item{starting_angle}{Starting angle} 15 | 16 | \item{distance}{Distance} 17 | } 18 | \value{ 19 | Objects of class `sfc` containing linestring geometries 20 | } 21 | \description{ 22 | Create lines radiating at equal angles from a point 23 | } 24 | \examples{ 25 | point = sf::st_centroid(london_a()) 26 | n = 4 27 | l = zb_lines(point, n) 28 | plot(l) 29 | } 30 | -------------------------------------------------------------------------------- /man/zb_plot.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/plot.R 3 | \name{zb_plot} 4 | \alias{zb_plot} 5 | \title{Plot zones} 6 | \usage{ 7 | zb_plot( 8 | z, 9 | palette = c("rings", "hcl", "dartboard"), 10 | title = NULL, 11 | text_size = c(0.3, 1), 12 | zone_label_thres = 0.002 13 | ) 14 | } 15 | \arguments{ 16 | \item{z}{An `sf` object containing zones covering the region} 17 | 18 | \item{palette}{Palette type, one of \code{"hcl"} (a palette based on the HCL color space), \code{"rings"} (a palette which colors the rings using the YlOrBr color brewer palette), \code{"dartboard"} (a palette which resembles a 
dartboard)} 19 | 20 | \item{title}{Plot title} 21 | 22 | \item{text_size}{Vector of two numeric values that determine the relative text sizes. The first determines the smallest text size and the second one the largest text size. The largest text size is used for the outermost circle, and the smallest for the central circle in case there are 9 or more circles. If there are less circles, the relative text size is larger (see source code for exact method)} 23 | 24 | \item{zone_label_thres}{This number determines in which zones labels are printed, namely each zone for which the relative area size is larger than `zone_label_thres`.} 25 | } 26 | \value{ 27 | A static plot created using R's base `graphics` package 28 | } 29 | \description{ 30 | This function opens a static map of the zones 31 | } 32 | \examples{ 33 | zb_plot(zb_zone(london_c())) 34 | } 35 | -------------------------------------------------------------------------------- /man/zb_quadrat.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/quadrat.R 3 | \name{zb_quadrat} 4 | \alias{zb_quadrat} 5 | \title{Divide a region into quadrats} 6 | \usage{ 7 | zb_quadrat(x, ncol, nrow = NULL, intersection = TRUE) 8 | } 9 | \arguments{ 10 | \item{x}{x} 11 | 12 | \item{ncol}{ncol} 13 | 14 | \item{nrow}{nrow} 15 | 16 | \item{intersection}{intersection} 17 | } 18 | \value{ 19 | An sf object 20 | } 21 | \description{ 22 | Divide a region into quadrats 23 | } 24 | \examples{ 25 | x = london_a() 26 | c = sf::st_centroid(london_a()) 27 | plot(zb_quadrat(x, ncol = 2), col = 2:5) 28 | plot(c, add = TRUE, col = "white") 29 | plot(zb_quadrat(x, ncol = 3)) 30 | plot(zb_quadrat(x, ncol = 4)) 31 | plot(zb_quadrat(x, ncol = 4, intersection = FALSE)) 32 | } 33 | -------------------------------------------------------------------------------- /man/zb_segment.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/segment.R 3 | \name{zb_segment} 4 | \alias{zb_segment} 5 | \title{Make segments} 6 | \usage{ 7 | zb_segment(x = NULL, area = NULL, n_segments = 12, distance = NA) 8 | } 9 | \arguments{ 10 | \item{x}{Centre point. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one point, or a name of a city (which is looked up with OSM geocoding).} 11 | 12 | \item{area}{(optional) Area. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one (multi) polygon} 13 | 14 | \item{n_segments}{(optional) Number of segments. The number of segments. Either one number which determines the number of segments applied to all circles, or a vector with a number for each circle (which should be a multiple of 4, see also the argument \code{labeling}). By default, the central circle is not segmented (see the argument \code{segment_center}).} 15 | 16 | \item{distance}{Distance The distances between the circles. For the center circle, it is the distance between the center and the circle. 
If only one number is specified, \code{distance_growth} determines the increment at which the distances grow for the outer circles.} 17 | } 18 | \value{ 19 | An `sf` data frame 20 | } 21 | \description{ 22 | Make segments 23 | } 24 | \examples{ 25 | zb_plot(zb_segment(london_c(), london_a())) 26 | } 27 | -------------------------------------------------------------------------------- /man/zb_zone.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/zone.R 3 | \name{zb_zone} 4 | \alias{zb_zone} 5 | \title{Generate zones covering a region of interest} 6 | \usage{ 7 | zb_zone( 8 | x = NULL, 9 | area = NULL, 10 | n_circles = NA, 11 | n_segments = 12, 12 | distance = 1, 13 | distance_growth = 1, 14 | labeling = NA, 15 | starting_angle = NA, 16 | segment_center = FALSE, 17 | intersection = TRUE, 18 | city = NULL 19 | ) 20 | } 21 | \arguments{ 22 | \item{x}{Centre point. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one point, or a name of a city (which is looked up with OSM geocoding).} 23 | 24 | \item{area}{(optional) Area. Should be an \code{\link[sf:sf]{sf}} or \code{\link[sf:sfc]{sfc}} object containing one (multi) polygon} 25 | 26 | \item{n_circles}{Number of rings including the central circle. By default 5, unless \code{area} is specified (then it is set automatically to fill the area).} 27 | 28 | \item{n_segments}{(optional) Number of segments. The number of segments. Either one number which determines the number of segments applied to all circles, or a vector with a number for each circle (which should be a multiple of 4, see also the argument \code{labeling}). By default, the central circle is not segmented (see the argument \code{segment_center}).} 29 | 30 | \item{distance}{Distance The distances between the circles. For the center circle, it is the distance between the center and the circle. If only one number is specified, \code{distance_growth} determines the increment at which the distances grow for the outer circles.} 31 | 32 | \item{distance_growth}{The rate at which the distances between the circles grow. Only applicable when \code{distance} is one number and \code{n_circles > 1}. See also \code{distance}.} 33 | 34 | \item{labeling}{The labeling of the zones. Either \code{"clock"} which uses the clock analogy (i.e. hours 1 to 12) or \code{"NESW"} which uses the cardinal directions N, E, S, W. If the number of segments is 12, the clock labeling is used, and otherwise NESW. Note that the number of segments should be a multiple of four. If, for instance, the number of segments is 8, then the segments are labeled N1, N2, E1, E2, S1, S2, W1, and W2.} 35 | 36 | \item{starting_angle}{The angle of the first of the radii that create the segments (degrees). By default, it is 15 when \code{n_segments} is 12 (i.e. the ClockBoard setting) and -45 otherwise.} 37 | 38 | \item{segment_center}{Should the central circle be divided into segments? `FALSE` by default.} 39 | 40 | \item{intersection}{Should the zones be intersected with the area? \code{TRUE} by default.} 41 | 42 | \item{city}{(optional) Name of the city. 
If specified, it adds a column `city` to the returned `sf` object.} 43 | } 44 | \value{ 45 | An `sf` object containing zones covering the region 46 | } 47 | \description{ 48 | This function first divides geographic space into [annuli](https://en.wikipedia.org/wiki/Annulus_(mathematics)) 49 | (concentric 2d rings or 'doughnuts') and then subdivides each annulus 50 | into a number of segments. 51 | } 52 | \details{ 53 | By default, 12 segments are used for each annulus, resulting in a zoning system that can 54 | be used to refer to segments in [clock position](https://en.wikipedia.org/wiki/Clock_position), 55 | with 12 representing North, 3 representing East, 6 South and 9 West. 56 | } 57 | \examples{ 58 | # default settings 59 | z = zb_zone(london_c(), london_a()) 60 | zb_plot(zb_zone(london_c(), london_a(), n_circles = 2)) 61 | zb_plot(zb_zone(london_c(), london_a(), n_circles = 4, distance = 2, distance_growth = 0)) 62 | zb_plot(zb_zone(london_c(), london_a(), n_circles = 3, n_segments = c(1,4,8))) 63 | } 64 | -------------------------------------------------------------------------------- /paper/figure_cycling_accidents.R: -------------------------------------------------------------------------------- 1 | library(dplyr) 2 | library(tmap) 3 | 4 | # download preprocessed data (processing script /data-raw/crashes.R) 5 | df = readRDS(gzcon(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/ksi_bkm_zone.rds"))) 6 | uk = readRDS(gzcon(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/uk.rds"))) 7 | thames = readRDS(gzcon(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/thames.rds"))) 8 | 9 | # filter: set zones with less than 20,000 km of cycling per year to NA 10 | df_filtered = df %>% 11 | mutate(ksi_bkm = ifelse((bkm_yr * 1e09) < 2e04, NA, ksi_bkm)) 12 | 13 | tmap_mode("plot") 14 | (tm = tm_shape(uk) + 15 | tm_fill(col = "white") + 16 | tm_shape(df_filtered, is.master = TRUE) + 17 | tm_polygons("ksi_bkm", breaks = c(0, 1000, 2500, 5000, 7500, 12500), textNA = "Too little cycling", title = "Killed and seriously injured\ncyclists per billion cycled\nkilometers") + 18 | tm_facets(by = "city", ncol=4) + 19 | tm_shape(uk) + 20 | tm_borders(lwd = 1, col = "black", lty = 3) + 21 | tm_shape(thames) + 22 | tm_lines(lwd = 1, col = "black", lty = 3) + 23 | tm_layout(bg.color = "lightblue", legend.outside.size = .23, outer.margins = 0, legend.text.size = .7, legend.title.size = 1, panel.label.size = 1.2)) 24 | 25 | tmap_save(tm, filename = "paper/figures/cycling_accidents.pdf", width = 5, height = 3, scale = .6) 26 | -------------------------------------------------------------------------------- /paper/figure_world_cities.R: -------------------------------------------------------------------------------- 1 | library(tmap) 2 | library(dplyr) 3 | library(terra) 4 | library(stars) 5 | library(maptiles) 6 | devtools::load_all() 7 | localdir = "~/local/data/worldpop/" 8 | 9 | 10 | data(metro) 11 | data(World) 12 | 13 | ttm() 14 | qtm(metro, symbols.size = "pop2020") 15 | 16 | x = c( 17 | "Moscow", 18 | "Istanbul", 19 | "London", 20 | "Paris", 21 | "Rome", 22 | "Berlin", 23 | "Madrid", 24 | "Amsterdam", 25 | 26 | "Cairo", 27 | "Nairobi", 28 | "Johannesburg", 29 | "Lagos", 30 | "Kinshasa", 31 | 32 | "Toronto", 33 | "Boston", 34 | "New York", 35 | "Chicago", 36 | "Los Angeles", 37 | 38 | "Mexico City", 39 | "Rio de Janeiro", 40 | "Buenos Aires", 41 | "Bogota", 42 | "Sao Paulo", 43 | 44 | 45 | "Tehran", 46 | "Tokyo", 47 | 
"Beijing", 48 | "Hong Kong", 49 | "Bangkok", 50 | "Singapore", 51 | "Dubai", 52 | "Delhi", 53 | "Mumbai", 54 | "Kuala Lumpur", 55 | "Seoul", 56 | "Shenzhen", 57 | 58 | "Sydney") 59 | 60 | df = metro[match(x, metro$name), ] 61 | df$country_area = World$area[match(df$iso_a3, World$iso_a3)] 62 | df$country_area[is.na(df$country_area)] = 0 63 | 64 | df = df[order(df$country_area), c("name", "name_long", "iso_a3", "country_area")] 65 | 66 | 67 | df$continent = World$continent[match(df$iso_a3, World$iso_a3)] 68 | df$continent[df$name == "Hong Kong"] = "Asia" 69 | df$continent[df$name == "Singapore"] = "Asia" 70 | 71 | 72 | ################# 73 | ## Download WorldPop data 74 | ################# 75 | 76 | 77 | # some have other urls: https://data.worldpop.org/GIS/Population/Global_2000_2020_Constrained/2020/maxar_v1/NGA/nga_ppp_2020_UNadj_constrained.tif 78 | 79 | df = df %>% 80 | mutate(f = paste0("https://data.worldpop.org/GIS/Population/Global_2000_2020_Constrained/2020/BSGM/", iso_a3, "/", tolower(iso_a3), "_ppp_2020_UNadj_constrained.tif")) 81 | 82 | for (f in df$f) { 83 | g = paste0(localdir, basename(f)) 84 | if (!file.exists(g)) { 85 | tryCatch({ 86 | curl::curl_download(url = f, destfile = g) 87 | }, error = function(e) { 88 | NULL 89 | }) 90 | } 91 | } 92 | 93 | #https://data.worldpop.org/GIS/Population/Global_2000_2020/2020/RUS/rus_ppp_2020.tif 94 | #https://data.worldpop.org/GIS/Population/Global_2000_2020_Constrained/2020/BSGM/GBR/gbr_ppp_2020_UNadj_constrained.tif 95 | 96 | 97 | ################# 98 | ## OSM centres 99 | ################# 100 | 101 | geo = tmaptools::geocode_OSM(df$name, as.sf = TRUE) 102 | # update cities 103 | geo$point[geo$query == "London"] = st_transform(london_c(), crs = 4326) 104 | 105 | 106 | ## check which ones are better: OSM or metro dataset 107 | geo$id = 1:nrow(geo) 108 | 109 | qtm(geo, dots.col = "blue", text = "id", text.ymod = 1) + qtm(df, dots.col = "red") 110 | 111 | blue_better = c("Moscow", "London", "Rome", "Berlin", "Madrid", "Amsterdam", 112 | "Toronto", "Boston", "New York", "Chicago", "Mexico City", "Rio de Janeiro", 113 | "Buenos Aires", "Bogota", "Cairo", "Nairobi", "Johannesburg", 114 | "Tokyo", "Beijing", "Hong Kong", "Bangkok", "Singapore", "Dubai", 115 | "Delhi", "Kuala Lumpur", "Seoul", "Sydney", "Sao Paulo", "Tehran", "Mumbai", "Shenzhen", "Lagos", "Kinshasa") 116 | 117 | ids = match(blue_better, df$name) 118 | 119 | df$geometry[ids] = geo$point[ids] 120 | 121 | ################# 122 | ## Zones 123 | ################# 124 | 125 | ## create zones 126 | zns = lapply(seq_len(nrow(df)), function(i) { 127 | zonebuilder::zb_zone(df[i, ], n_circles = 10) 128 | }) 129 | names(zns) = x 130 | 131 | 132 | ################# 133 | ## Background tiles 134 | ################# 135 | 136 | ## download static basemaps 137 | # basemaps = lapply(zns, function(z) { 138 | # maptiles::get_tiles(z, "CartoDB.VoyagerNoLabels", zoom = 9) 139 | # # tm = tmaptools::read_osm(z, zoom = 9, type = 'stamen-watercolor', ext = 1.05) 140 | # }) 141 | 142 | ################# 143 | ## Load worldpop data 144 | ################# 145 | 146 | popdata = lapply(1:nrow(df), function(i) { 147 | #if (i==31) browser() 148 | bn = basename(df$f[i]) 149 | g = file.path(localdir, bn) 150 | if (file.exists(g)) { 151 | a1 = terra::rast(g) 152 | e = ext(as(zns[[i]], "SpatVector")) 153 | a1 = crop(a1, e) 154 | 155 | a1 = extend(a1, e) 156 | a1[][is.na(a1[]) | is.nan(a1[])] = 0 157 | 158 | } else { 159 | a1 = NULL 160 | } 161 | a1 162 | }) 163 | df$has_pop_data = !sapply(popdata, is.null) 
164 | 165 | 166 | ################# 167 | ## City admin borders 168 | ################# 169 | admin = vector(mode = "list", length = nrow(df)) 170 | names(admin) = df$name 171 | 172 | # Mexico 173 | # source: https://datacatalog.worldbank.org/dataset/mexico-municipalities-2012 174 | mex = read_sf("sandbox/Muni_2012gw.shp") 175 | mex = mex[mex$CVE_ENT == "09", ] 176 | mex = st_union(mex) 177 | admin$`Mexico City` = mex 178 | 179 | # London 180 | admin$London = zonebuilder::london_a() 181 | 182 | # Amsterdam 183 | data("NLD_muni") 184 | ams = sf::st_transform(NLD_muni$geometry[which(NLD_muni$name == "Amsterdam")], crs = 4326) 185 | admin$Amsterdam = ams 186 | 187 | # Bangkok 188 | #https://data.humdata.org/dataset/thailand-administrative-boundaries 189 | 190 | bk = st_read("sandbox/bangkok/tha_admbnda_adm1_rtsd_20190221.shp") %>% 191 | filter(ADM1_EN == "Bangkok") 192 | admin$Bangkok = bk 193 | 194 | #Berlin 195 | #https://daten.gdz.bkg.bund.de/produkte/vg/vg250_ebenen_0101/aktuell/vg250_01-01.geo84.shape.ebenen.zip 196 | 197 | bl = st_read("sandbox/berlin/vg250_01-01.geo84.shape.ebenen/vg250_ebenen_0101/VG250_GEM.shp") 198 | bl = bl[which(bl$GEN == "Berlin"),] 199 | admin$Berlin = bl 200 | 201 | #Paris 202 | # https://www.data.gouv.fr/en/datasets/arrondissements-1/ 203 | pr = st_union(st_read("sandbox/paris/arrondissements.shp")) 204 | admin$Paris = pr 205 | 206 | 207 | rm(mex, ams, bk, bl, pr) 208 | 209 | 210 | ################# 211 | ## Plots 212 | ################# 213 | 214 | plotdir = "paper/figures/" 215 | 216 | df$circles = 7 217 | 218 | 219 | # Normalize popdata to people/km2 220 | popdata_norm = lapply(popdata, function(p) { 221 | #km2 = as.numeric(st_area(tmaptools::bb_poly(st_bbox(p)))) / 1e6 222 | km2 = sum(cellSize(p)[]) / 1e6 223 | km2_cell = km2 / ncell(p) 224 | p[] = p[] / km2_cell 225 | p[][is.na(p[])] = 0 226 | p 227 | }) 228 | 229 | if (FALSE) { 230 | alldata = as.vector(unlist(lapply(popdata_norm, function(p) { 231 | p[] 232 | }))) 233 | 234 | kdata = kmeans(alldata, centers = 7) 235 | kdata$centers 236 | tdata = round(table(kdata$cluster)/length(alldata)*100, 2) 237 | 238 | pquan = t(sapply(popdata_norm, function(p) { 239 | as.vector(quantile(na.omit(p[]))) 240 | })) 241 | colSums(pquan) / 30 242 | } 243 | 244 | 245 | 246 | brks = c(0, 1000, 2500, 5000, 8000, 15000, 30000, Inf) 247 | pal = c("#FFFFFF", pals::brewer.blues(12)[seq(2,12,by=2)]) 248 | acol = pals::alphabet(26)[26] 249 | 250 | 251 | qtm_border = function(shp, width = 2, col = "black", master = FALSE) { 252 | tm_shape(shp, is.master = master) + 253 | tm_borders(lwd = (width * 2) + 1, col = "white") + 254 | tm_shape(shp) + 255 | tm_borders(lwd = width, col = col) 256 | } 257 | 258 | 259 | continents = pals::brewer.pastel1(8)[c(5,6,2,3,1,NA,8)] 260 | names(continents) = setdiff(levels(df$continent), "Antarctica") 261 | 262 | nms = sort(df$name) 263 | 264 | tml = lapply(match(nms, df$name), function(i) { 265 | if (df$has_pop_data[i]) { 266 | tm = tm_shape(popdata_norm[[i]]) + 267 | tm_raster(breaks = brks, title = "pop/km2", palette = pal, legend.show = FALSE) 268 | 269 | # if (!is.null(admin[[i]])) { 270 | # tm = tm + qtm_border(admin[[i]], col = acol, width = 3) 271 | # } 272 | 273 | tm = tm + qtm_border(zns[[i]] %>% filter(circle_id <= df$circles[i]), master = TRUE) + tm_layout(frame = FALSE, outer.margins = 0, scale = 0.5, legend.position = c("right", "bottom"), panel.show = TRUE, panel.label.bg.color = continents[as.character(df$continent[i])], panel.labels = df$name[i], panel.label.size = 1.4) 274 | } else 
{ 275 | tm = NULL 276 | } 277 | return(tm) 278 | }) 279 | 280 | with_borders = which(!sapply(admin, is.null)) 281 | 282 | 283 | tml1 = lapply(match(names(with_borders), df$name), function(i) { 284 | if (df$has_pop_data[i]) { 285 | tm = tm_shape(popdata_norm[[i]]) + 286 | tm_raster(breaks = brks, title = "pop/km2", palette = pal, legend.show = FALSE) 287 | 288 | if (!is.null(admin[[i]])) { 289 | tm = tm + qtm_border(admin[[i]], col = acol, width = 3) 290 | } 291 | 292 | tm = tm + qtm_border(zns[[i]] %>% filter(circle_id <= df$circles[i]), master = TRUE) + tm_layout(frame = FALSE, outer.margins = 0, scale = 0.6, legend.position = c("right", "bottom"), panel.show = TRUE, panel.label.bg.color = continents[as.character(df$continent[i])], panel.labels = df$name[i], panel.label.size = 1.4) 293 | } else { 294 | tm = NULL 295 | } 296 | return(tm) 297 | }) 298 | 299 | 300 | tml2 = lapply(match(nms, df$name), function(i) { 301 | if (df$has_pop_data[i]) { 302 | tm = tm_shape(popdata_norm[[i]]) + 303 | tm_raster(breaks = brks, title = "pop/km2", palette = pal, legend.show = FALSE) 304 | 305 | # if (!is.null(admin[[i]])) { 306 | # tm = tm + qtm_border(admin[[i]], col = acol, width = 3) 307 | # } 308 | 309 | tm = tm + qtm_border(zns[[i]] %>% filter(circle_id <= df$circles[i]), master = TRUE) + tm_layout(frame = FALSE, outer.margins = 0, scale = 0.5, legend.position = c("right", "bottom"), panel.show = TRUE, panel.label.bg.color = continents[as.character(df$continent[i])], panel.labels = df$name[i], panel.label.size = 1.4) 310 | } else { 311 | tm = NULL 312 | } 313 | return(tm) 314 | }) 315 | 316 | 317 | tma1 = tmap_arrange(tml1, ncol = 3, outer.margins = c(0, 0.02, 0, 0.02)) 318 | tmap_save(tma1, paste0(plotdir, "cities_p1.png"), width = 1800, height = 1400) 319 | 320 | tma2 = tmap_arrange(tml2, ncol = 6, outer.margins = c(0, 0.02, 0, 0.02)) 321 | tmap_save(tma2, paste0(plotdir, "cities_p2.png"), width = 1800, height = 2200) 322 | 323 | -------------------------------------------------------------------------------- /paper/foss4g/foss4g-zonebuilder-abstract.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Zonebuilders: cross-platform and language-agnostic tools for generating zoning systems for urban analysis 4 | 5 | Zones are key building blocks used for analysis and creating models (mental and statistical) of urban and environmental systems. 6 | Used in a range of fields from biodiversity assessment to transport planning, spatially contiguous areal units break-up continuous space into discrete chunks. 7 | Many methods *rely* on good zoning systems, including origin-destination analysis, geographically weighted regression, and choropleth visualisation. 8 | 9 | Open access administrative boundaries are increasingly available through national databases and OpenStreetMap but are often inappropriate to geographic research, analysis and map making needs, being often: based on arbitrary factors; inconsistent between different cities/regions; and of highly variable sizes and shapes. 10 | 11 | This talk outlines an approach to tackle these problems: tools that can auto-generate zones based on minimal input data. 12 | We propose cross-platform and language agnostic implementations to enable a diverse range of people to generate bespoke zoning systems for their needs based on the understanding that accessibility, flexibility and extensibility are key to usability. 
13 | We also demonstrate working tools that take a step in this direction, which at the time of writing include: 14 | 15 | - a core library written in Rust with small and fast binaries available for all major operating systems 16 | - an R package (published on the Comprehensive R Archive Network, CRAN) that also enables visualisation of zoning systems 17 | 18 | We plan to create a Python package, a QGIS plugin and a web user interface based on the core library, and welcome suggestions and contributions via our GitHub organization: . 19 | Based on the experience of developing these tools, we will discuss next steps towards accessible and flexible zone building tools and language/platform agnostic tools for geospatial work in general. 20 | 21 | We conclude that the approach, based on low-level and easy-to-distribute tools that can be used in multiple free and open source frameworks, could be applied to other domains and help join diverse communities (e.g. based on R, Python or QGIS) through use of shared low-level, cross-platform and future-proof implementations. 22 | 23 | 24 | The source code underlying the approach can be found at 25 | 26 | # Description 27 | 28 | The zonebuilder approach aims to minimise input data requirements, generate consistent zones comparable between widely varying urban systems, and provide geographically contiguous areal units. 29 | Zones with appropriate sizes, shapes and coverage are needed for a range of applications. 30 | However, appropriate areal units are often hard to find and, in cases where no pre-existing zoning systems can be found, hard to create. 31 | The motivations for generating a new zoning system and use cases envisioned include: 32 | 33 | - Locating cities. 34 | Automated zoning systems based on a clear centrepoint can support map interpretation by making it immediately clear where the city centre is, and what the scale of the city is. 35 | 36 | - Reference system of everyday life. 37 | The zone name contains information about the distance to the center as well as the cardinal direction. 38 | E.g. "I live in C12 and work in B3." or "The train station is in the center and our hotel is in B7". 39 | Moreover, the zones indicate whether walking and cycling are feasible options given the distance. 40 | 41 | - Aggregation for descriptive statistics / comparability across cities. 42 | By using the zoning system to aggregate statistics (e.g. on population density, air quality, bicycle use, number of dwellings), cities can easily be compared to each other. 43 | 44 | - Modelling cities. 45 | The zoning system can be used to model urban mobility. 46 | 47 | We demonstrate a particular implementation of the approach and show how users can use the tools to generate custom zoning systems suited to diverse needs: the ClockBoard zoning system, which consists of 12 segments divided by concentric rings of increasing distance, is highlighted as an example. 48 | The zonebuilder approach also enables people to propose and demonstrate alternative zoning systems. 49 | 50 | # Notes 51 | 52 | This paper was submitted at a time when the R spatial community is seeking to engage more proactively in the wider FOSS4G movement and aims to take a small step in that direction. See for details. 
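
# Example

As a concrete illustration of the ClockBoard zoning system described above, a minimal R sketch (an editor-added example based on the package's documented defaults; the bundled London datasets are used purely for illustration):

```r
library(zonebuilder)
# 12 segments per ring by default; ring widths grow as 1, 2, 3, ... km,
# so the outer radii follow the triangular number sequence
z = zb_zone(london_c(), london_a())
zb_plot(z)                            # static map with clock-style labels such as B3 or C12
head(z[, c("label", "circle_id")])    # label and ring number for each zone
```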
53 | 54 | # Authors 55 | 56 | Robin Lovelace (1) 57 | 58 | Martijn Tennekes (2) 59 | 60 | Dustin Carlino (3) 61 | 62 | (1) University of Leeds 63 | 64 | (2) Statistics Netherlands 65 | 66 | (3) Lead developer of A/B Street 67 | 68 | -------------------------------------------------------------------------------- /paper/tables.R: -------------------------------------------------------------------------------- 1 | library(xtable) 2 | df = data.frame('Number of rings' = 1:9, 'Diameter across (km)' = zb_100_triangular_numbers[1:9]*2, 'Area (sq. km)' = round(zb_100_triangular_numbers[1:9]^2*pi, 2)) 3 | xtable::xtable(df) 4 | -------------------------------------------------------------------------------- /sandbox/Dutch_city_population.R: -------------------------------------------------------------------------------- 1 | library(tmap) 2 | library(sf) 3 | library(tidyverse) 4 | 5 | brt = st_read("~/local/data/buurt_2018_v2.shp") 6 | 7 | brt = brt %>% 8 | filter(WATER == "NEE") %>% 9 | select(BEV_DICHTH) %>% 10 | mutate(brt_id = 1L:n(), 11 | BEV_DICHTH = ifelse(BEV_DICHTH < 0, 0, BEV_DICHTH)) 12 | 13 | NLD_cities = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/NLD_cities.Rds")) %>% 14 | arrange(desc(population)) 15 | 16 | 17 | zbs = do.call(rbind, lapply(1:nrow(NLD_cities), function(i) { 18 | ci = NLD_cities[i, ] 19 | 20 | # Amsterdam 5, Eindhoven-Rotterdam 4, Roermond-Zeeland 2, others 3 21 | nrings = ifelse(ci$population < 60000, 2, 22 | ifelse(ci$population < 220000, 3, 23 | ifelse(ci$population < 800000, 4, 5))) 24 | 25 | zb = zb_zone(point = ci, n_circles = nrings) %>% 26 | mutate(name = ci$name, 27 | labelplus = paste(ci$name, label, sep = "_")) 28 | 29 | zb 30 | })) 31 | 32 | 33 | 34 | 35 | cities = lapply(unique(zbs$name), function(city) { 36 | x = zbs %>% filter(name == city) %>% 37 | mutate(zoneid = 1L:n()) %>% 38 | st_transform(28992) %>% 39 | sf::st_make_valid() %>% 40 | st_cast("MULTIPOLYGON") 41 | 42 | x_brt = sf::st_intersection(brt, x) %>% 43 | mutate(area = as.numeric(st_area(.)), 44 | pop = BEV_DICHTH * (area / 10^6)) 45 | 46 | pop_totals = x_brt %>% 47 | st_drop_geometry() %>% 48 | group_by(zoneid) %>% 49 | summarise(pop = sum(pop)) 50 | 51 | x %>% left_join(pop_totals, by = "zoneid") 52 | }) 53 | names(cities) = unique(zbs$name) 54 | 55 | tmap_options(limits = c(facets.view = 10)) 56 | 57 | tms = lapply(cities[1:10], function(city) { 58 | tm_shape(city) + 59 | tm_polygons("pop", convert2density = TRUE, breaks = c(0, 2.5, 5, 7.5, 10, 15, 25)*1000, title = city$name[1]) 60 | }) 61 | 62 | tmap_arrange(tms) 63 | 64 | 65 | citiesSF = do.call(rbind, cities) 66 | 67 | citiesSF$name = factor(citiesSF$name, levels = unique(zbs$name)) 68 | 69 | tm = tm_shape(citiesSF) + 70 | tm_polygons("pop", convert2density = TRUE, breaks = c(0, 2.5, 5, 10, 15, 25)*1000, title = "Population per km2") + 71 | tm_facets(by = "name", ncol = 5) 72 | 73 | tmap_save(tm, filename = "Dutch_city_population.png", width = 5, height = 8, scale = 0.75) 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /tic.R: -------------------------------------------------------------------------------- 1 | # installs dependencies, runs R CMD check, runs covr::codecov() 2 | do_package_checks() 3 | 4 | if (ci_on_ghactions() && ci_has_env("BUILD_PKGDOWN")) { 5 | # creates pkgdown site and pushes to gh-pages branch 6 | # only for the runner with the "BUILD_PKGDOWN" env var set 7 | do_pkgdown() 8 | } 9 | 
-------------------------------------------------------------------------------- /vignettes/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | *.R 3 | -------------------------------------------------------------------------------- /vignettes/demo_dutch_cities.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Combining zoning systems" 3 | output: rmarkdown::html_vignette 4 | vignette: > 5 | %\VignetteIndexEntry{Combining zoning systems} 6 | %\VignetteEncoding{UTF-8} 7 | %\VignetteEngine{knitr::rmarkdown} 8 | editor_options: 9 | chunk_output_type: console 10 | --- 11 | 12 | ```{r, include = FALSE} 13 | knitr::opts_chunk$set( 14 | eval = FALSE, 15 | collapse = TRUE, 16 | comment = "#>" 17 | ) 18 | ``` 19 | 20 | This vignette demonstrates possibilities when zoning systems from different cities meet. 21 | It raises the question: how should different systems be combined geographically? 22 | 23 | You need the latest version of the package: 24 | 25 | ```{r, eval=FALSE} 26 | remotes::install_github("zonebuilders/zonebuilder") 27 | ``` 28 | 29 | 30 | For this demo, we need the following libraries: 31 | 32 | 33 | ```{r setup} 34 | library(zonebuilder) # for the zoning system 35 | library(sf) # for processing spatial data 36 | library(dplyr) # for processing general data 37 | library(tmap) # for visualizing spatial data 38 | ``` 39 | 40 | 41 | We will apply the zoning system to the main Dutch cities, and analyse commuting patterns between the zones. The data can be read as follows: 42 | 43 | 44 | ```{r} 45 | NLD_cities = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/NLD_cities.Rds")) 46 | NLD_wijk_od = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/NLD_wijk_od.Rds")) 47 | NLD_wijk_centroids = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/NLD_wijk_centroids.Rds")) 48 | ``` 49 | 50 | Let's take a look at the `NLD_cities` data: 51 | 52 | ```{r} 53 | NLD_cities %>% arrange(desc(population)) 54 | ``` 55 | 56 | ...and plot it on an interactive map: 57 | 58 | ```{r} 59 | tmap_mode("view") # enable interactive mode in tmap 60 | qtm(NLD_cities, symbols.size = "population") 61 | ``` 62 | 63 | 64 | The following code chunk generates zones for the Dutch cities: 65 | 66 | 67 | ```{r} 68 | zbs = do.call(rbind, lapply(1:nrow(NLD_cities), function(i) { 69 | ci = NLD_cities[i, ] 70 | 71 | # Amsterdam 5, Eindhoven-Rotterdam 4, Roermond-Zeeland 2, others 3 72 | nrings = ifelse(ci$population < 60000, 2, 73 | ifelse(ci$population < 220000, 3, 74 | ifelse(ci$population < 800000, 4, 5))) 75 | 76 | zb = zb_zone(x = ci, n_circles = nrings) %>% 77 | mutate(name = ci$name, 78 | labelplus = paste(ci$name, label, sep = "_")) 79 | 80 | zb 81 | })) 82 | ``` 83 | 84 | 85 | ```{r} 86 | tm_basemap("OpenStreetMap") + 87 | tm_shape(zbs) + 88 | tm_polygons(col = "circle_id", id = "labelplus", style = "cat", palette = "YlOrBr", alpha = 0.7) + 89 | tm_scale_bar() 90 | ``` 91 | 92 | -------------------------------------------------------------------------------- /vignettes/hackathon.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Zonebuilder hackathon" 3 | output: rmarkdown::html_vignette 4 | vignette: > 5 | %\VignetteIndexEntry{Zonebuilder hackathon} 6 | %\VignetteEngine{knitr::rmarkdown} 7 | %\VignetteEncoding{UTF-8} 8 | --- 9 | 10 | ```{r, include = FALSE} 11 | 
knitr::opts_chunk$set( 12 | eval = FALSE, 13 | collapse = TRUE, 14 | comment = "#>" 15 | ) 16 | ``` 17 | 18 | ## Introduction 19 | 20 | **zonebuilder** is a package for exploring zoning systems. 21 | This document contains ideas on challenges that can be tackled using zoning systems, example code to get started and suggestions on how to get involved. 22 | 23 | ## Setup 24 | 25 | To ensure that you have the necessary software installed, try running the following lines of code in an R console (you need the latest version of the package): 26 | 27 | ```{r, eval=FALSE} 28 | remotes::install_github("zonebuilders/zonebuilder") 29 | remotes::install_github("itsleeds/pct") 30 | ``` 31 | 32 | 33 | ```{r setup} 34 | library(zonebuilder) 35 | library(dplyr) 36 | library(tmap) 37 | tmap_mode("view") 38 | ``` 39 | 40 | 41 | Ideas for the hackathon: 42 | 43 | - Explore results from automated zoning of a range of cities 44 | - How many supermarkets in different zones of the city? 45 | - Explore how mode and distance of travel change depending on city zones 46 | - Explore how to calculate travel times from zone to zone for different travel modalities 47 | - Explore how travel times from city zones to the city centre for different modalities, across multiple cities, affect the number of commuters 48 | - Find a data-driven method for defining the city centre (e.g. density of addresses, population density, building date, number of companies, number of nodes of the road infrastructure). 49 | - Number of houses vs estimated population in different zones using UK data 50 | - Demonstrate aggregation of OD data into the zoning system 51 | 52 | ```{r, eval=FALSE} 53 | zones_west_yorkshire = pct::get_pct_zones("west-yorkshire") 54 | zones_leeds_official = zones_west_yorkshire %>% filter(lad_name == "Leeds") 55 | ``` 56 | 57 | 58 | ```{r, eval=FALSE} 59 | leeds_centroid = tmaptools::geocode_OSM(q = "Leeds", as.sf = TRUE) 60 | ``` 61 | 62 | ```{r, echo=FALSE, eval=FALSE} 63 | saveRDS(zones_leeds_official, "zones_leeds_official.Rds") 64 | piggyback::pb_upload("zones_leeds_official.Rds") 65 | piggyback::pb_download_url("zones_leeds_official.Rds") 66 | saveRDS(zones_leeds_zb, "zones_leeds_zb.Rds") 67 | piggyback::pb_upload("zones_leeds_zb.Rds") 68 | ``` 69 | 70 | You can get and plot the output of the preceding code chunk with: 71 | 72 | ```{r} 73 | leeds_centroid = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/leeds_centroid.Rds")) 74 | zones_leeds_official = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/zones_leeds_official.Rds")) 75 | zone_outline = zones_leeds_official %>% 76 | sf::st_buffer(dist = 0.0001) %>% 77 | sf::st_union() 78 | zones_leeds_zb = zb_zone(x = zone_outline, point = leeds_centroid) 79 | tm_shape(zones_leeds_zb) + tm_borders() + 80 | tm_text("label") 81 | ``` 82 | 83 | ## Explore results of automated zoning system 84 | 85 | ### Generate zones for different cities 86 | 87 | The zoning system works well to represent cities that have a clear centre (monocentric cities) with city zones connected by radial and circular orbital routes, such as Erbil: 88 | 89 | ```{r} 90 | city_name = "Erbil" 91 | city_centre = tmaptools::geocode_OSM(city_name, as.sf = TRUE) 92 | zones_erbil = zb_zone(point = city_centre, n_circles = 5) 93 | tm_shape(zones_erbil) + tm_borders() + 94 | tm_text("label") + 95 | tm_basemap(server = leaflet::providers$OpenStreetMap) 96 | # zb_view(zones_erbil) 97 | ``` 98 | 99 | The zoning system works less well for other cities, e.g. 
cities with asymmetric and polycentric urban morphologies such as Dhaka, shown below. 100 | 101 | ```{r} 102 | city_name = "Dhaka" 103 | city_centre = tmaptools::geocode_OSM(city_name, as.sf = TRUE) 104 | zones_dhaka = zb_zone(point = city_centre, n_circles = 5) 105 | tm_shape(zones_dhaka) + tm_borders() + 106 | tm_text("label") + 107 | tm_basemap(server = leaflet::providers$OpenStreetMap) 108 | ``` 109 | 110 | 111 | ```{r, eval=FALSE, echo=FALSE} 112 | # Aim: get the largest cities in the world 113 | cities_worldwide = rnaturalearth::ne_download(scale = 10, type = "populated_places") 114 | 115 | city_names = c( 116 | "Delhi", 117 | "Mexico City", 118 | "Tokyo", 119 | "Beijing" 120 | ) 121 | 122 | city_name = "Delhi" 123 | city_centre = tmaptools::geocode_OSM(city_name, as.sf = TRUE) 124 | zones_dhaka = zb_zone(point = city_centre, n_circles = 5) 125 | tm_shape(zones_dhaka) + tm_borders() + 126 | tm_text("label") 127 | ``` 128 | 129 | ### How many supermarkets in different zones of the city? 130 | 131 | 132 | ```{r, eval=FALSE} 133 | devtools::install_github("itsleeds/geofabrik") 134 | library(geofabrik) 135 | leeds_shop_polygons = get_geofabrik(leeds_centroid, layer = "multipolygons", key = "shop", value = "supermarket") 136 | ``` 137 | 138 | ```{r, eval=FALSE, echo=FALSE} 139 | saveRDS(leeds_shop_polygons, "leeds_shop_polygons.Rds") 140 | piggyback::pb_upload("leeds_shop_polygons.Rds") 141 | piggyback::pb_download_url("leeds_shop_polygons.Rds") 142 | saveRDS(leeds_centroid, "leeds_centroid.Rds") 143 | piggyback::pb_upload("leeds_centroid.Rds") 144 | piggyback::pb_download_url("leeds_centroid.Rds") 145 | # leeds_roads = get_geofabrik(name = leeds_centroid) 146 | # leeds_shop_points = get_geofabrik(leeds_centroid, layer = "points", key = "amenity", value = "shop") 147 | ``` 148 | 149 | We have pre-saved the results as follows: 150 | 151 | ```{r} 152 | leeds_shop_polygons = readRDS(url("https://github.com/zonebuilders/zonebuilder/releases/download/0.0.1/leeds_shop_polygons.Rds")) 153 | z = zb_zone(zones_leeds_official, point = leeds_centroid, n_circles = 5) 154 | z_supermarkets = aggregate(leeds_shop_polygons["shop"], z, FUN = length) 155 | tm_shape(z_supermarkets) + 156 | tm_polygons("shop", alpha = 0.5, title = "N. Supermarkets") 157 | ``` 158 | 159 | 160 | 161 | #### Explore how mode and distance of travel change depending on city zones 162 | 163 | ```{r} 164 | 165 | ``` 166 | 167 | 168 | Robin to create UK example 169 | 170 | 171 | #### Demo Dutch cities and commuting 172 | 173 | See [demo Dutch cities vignette](https://zonebuilders.github.io/zonebuilder/articles/demo_dutch_cities.html) 174 | -------------------------------------------------------------------------------- /vignettes/references.bib: -------------------------------------------------------------------------------- 1 | @Article{bondaruk_assessing_2020, 2 | title = {Assessing the state of the art in {Discrete} {Global} {Grid} {Systems}: {OGC} criteria and present functionality}, 3 | volume = {74}, 4 | issn = {1195-1036}, 5 | shorttitle = {Assessing the state of the art in {Discrete} {Global} {Grid} {Systems}}, 6 | number = {1}, 7 | urldate = {2021-08-12}, 8 | journal = {Geomatica}, 9 | author = {Ben Bondaruk and Steven A. 
Roberts and Colin Robertson}, 10 | month = {mar}, 11 | year = {2020}, 12 | note = {Publisher: NRC Research Press}, 13 | pages = {9--30}, 14 | } 15 | 16 | @Article{raposo_virtual_2019, 17 | title = {A {Virtual} {Globe} {Using} a {Discrete} {Global} {Grid} {System} to {Illustrate} the {Modifiable} {Areal} {Unit} {Problem}}, 18 | volume = {54}, 19 | issn = {0317-7173}, 20 | abstract = {In the same way that discrete global grid systems (DGGS) are used to index data on the spherical Earth, they can aggregate point data, with their spherical polygons serving as bins. DGGS are particularly useful at multiple map scales because they are spatially hierarchical and exist on the sphere or ellipsoid, allowing large or small scale binning without projection distortion. We use DGGS in a free and open-source pedagogical tool for teaching students about the modifiable areal unit problem (MAUP). Our software application uses Dutton’s quaternary triangular mesh (QTM) to bin global data points geodesically with counts or measures of any theme at multiple levels. Users can interactively select the level to which the data are binned by the QTM, as well as translate the whole tessellation east or west so that points fall into and out of different bins. These two functions illustrate the scaling and zoning aspects of the MAUP with dynamically-drawn choropleths on the surface of a virtual globe that the user can zoom and rotate, allowing visualization at virtually any cartographic scale. Users may also select various quantile classifications to further explore issues in visualizing aggregate data. In addition to presenting this new tool, we highlight the importance, especially at smaller scales, of using geodesic point-in-polygon intersection detection, rather than the projected 2D methods typically used by geographic information systems.}, 21 | number = {1}, 22 | urldate = {2021-07-30}, 23 | journal = {Cartographica: The International Journal for Geographic Information and Geovisualization}, 24 | author = {Paulo Raposo and Anthony {C. Robinson} and Randall Brown}, 25 | month = {mar}, 26 | year = {2019}, 27 | note = {Publisher: University of Toronto Press}, 28 | keywords = {Discrete Global Grid Systems, geodetic overlay, globe virtuel, maillage triangulaire quaternaire, modifiable areal unit problem, problème d’unités territoriales modifiables, quaternary triangular mesh, superposition géodésique, systèmes de grilles globales discrètes, virtual globes}, 29 | pages = {51--62}, 30 | } 31 | 32 | @Article{hernandez-perez_grid_2011, 33 | title = {Grid {Generation} {Issues} in the {CFD} {Modelling} of {Two}-{Phase} {Flow} in a {Pipe}}, 34 | volume = {3}, 35 | issn = {1757-482X}, 36 | abstract = {The grid generation issues found in the 3D simulation of two-phase flow in a pipe using Computational Fluid Dynamics (CFD) are discussed in this paper. Special attention is given to the effect of the element type and structure of the mesh. The simulations were carried out using the commercial software package STAR-CCM+, which is designed for numerical simulation of continuum mechanics problems. The model consisted of a cylindrical vertical pipe. Different mesh structures were employed in the computational domain. The condition of two-phase flow was simulated with the Volume of Fluid (VOF) model, taking into consideration turbulence effects using the k-e model. The results showed that there is a strong dependency of the flow behaviour on the mesh employed. 
The best result was obtained with the grid known as butterfly grid, while the cylindrical mesh produced misleading results. The simulation was validated against experimental results.}, 37 | language = {en}, 38 | number = {1}, 39 | urldate = {2021-08-07}, 40 | journal = {The Journal of Computational Multiphase Flows}, 41 | author = {V. Hernandez-Perez and M. Abdulkadir and B.J. Azzopardi}, 42 | month = {mar}, 43 | year = {2011}, 44 | note = {Publisher: SAGE Publications Ltd STM}, 45 | keywords = {CFD, butterfly grid, cylindrical O-grid, mesh generation, two-phase flow}, 46 | pages = {13--26}, 47 | } 48 | 49 | @Article{alidadi_beyond_2018, 50 | title = {Beyond monocentricity: examining the spatial distribution of employment in {Tehran} metropolitan region, {Iran}}, 51 | volume = {22}, 52 | issn = {1226-5934}, 53 | shorttitle = {Beyond monocentricity}, 54 | abstract = {This research examines the spatial distribution of employment in Tehran metropolitan region as one of the most populated regions in West Asia. For this aim, our approach includes three steps; first, the paper investigates the level of monocentricity or the primacy of the main core, then, the paper utilises various methodologies to identify the employment subcenters in the region; and finally, the importance of identified centres is estimated by polycentric employment function. To do this, data obtained from Statistical Centre of Iran for 2006 and 2011 is provided in sub-district level, the smallest geographical unit. Results revealed that monocentric model is not able to explain the spatial distribution of employment in TMR; also, the main core loses its importance with the passage of time. Applying different methodologies for TMR identified 3 subcenters in 2006; whereas, it reached to 7 subcenters in 2011. In the last step, the deployed polycentric employment function explained 42\% and 51\% of total employment distribution throughout TMR in 2006 and 2011 respectively.}, 55 | number = {1}, 56 | urldate = {2021-08-07}, 57 | journal = {International Journal of Urban Sciences}, 58 | author = {Mehdi Alidadi and Hashem Dadashpoor}, 59 | month = {jan}, 60 | year = {2018}, 61 | pages = {38--58}, 62 | } 63 | 64 | @Article{vinoth_kumar_spatio-temporal_2007, 65 | title = {Spatio-temporal analysis for monitoring urban growth – a case study of {Indore} {City}}, 66 | volume = {35}, 67 | issn = {0974-3006}, 68 | abstract = {Urban sprawl is characterized by haphazard patchwork of development, which leads to an improper development in any city. To prevent this kind of sprawl in future, it is necessary to monitor the growth of the city. Hence, an attempt has been made in the present study to monitor the urban growth over a period of time by employing Remote Sensing and Geographic Information System techniques in conjunction with Shannon entropy. Shannon entropy is a measure to determine the compactness or dispersion of built-up land growth in the urban areas. The growth patterns of urban built-up land have been studied initially by dividing the area into four zones. The observations have been made with respect to each zone. Then, the study area has been divided into concentric circles of 1 km buffers and the growth patterns have been studied based on urban built-up density with respect to each circular buffer in all four zones. These observations have been integrated with road network to check the influence of infrastructure on haphazard urban growth. 
It has been found from the study that Shannon entropy is a good measure to determine the spatial concentration or dispersion of built-up land in the city. The study also proved the potential of RS and GIS techniques in the spatio-temporal analysis of urban growth trends and their consequences in the lands adjoining to urban areas.}, 69 | language = {en}, 70 | number = {1}, 71 | urldate = {2021-07-25}, 72 | journal = {Journal of the Indian Society of Remote Sensing}, 73 | author = {J. Antony {Vinoth Kumar} and S. K. Pathan and R. J. Bhanderi}, 74 | month = {mar}, 75 | year = {2007}, 76 | pages = {11--20}, 77 | } 78 | 79 | @TechReport{hart_use_1991, 80 | title = {The use of visual cues for vehicle control and navigation}, 81 | abstract = {At least three levels of control are required to operate most vehicles: (1) inner-loop control to counteract the momentary effects of disturbances on vehicle position; (2) intermittent maneuvers to avoid obstacles, and (3) outer-loop control to maintain a planned route. Operators monitor dynamic optical relationships in their immediate surroundings to estimate momentary changes in forward, lateral, and vertical position, rates of change in speed and direction of motion, and distance from obstacles. The process of searching the external scene to find landmarks (for navigation) is intermittent and deliberate, while monitoring and responding to subtle changes in the visual scene (for vehicle control) is relatively continuous and 'automatic'. However, since operators may perform both tasks simultaneously, the dynamic optical cues available for a vehicle control task may be determined by the operator's direction of gaze for wayfinding. An attempt to relate the visual processes involved in vehicle control and wayfinding is presented. The frames of reference and information used by different operators (e.g., automobile drivers, airline pilots, and helicopter pilots) are reviewed with particular emphasis on the special problems encountered by helicopter pilots flying nap of the earth (NOE). The goal of this overview is to describe the context within which different vehicle control tasks are performed and to suggest ways in which the use of visual cues for geographical orientation might influence visually guided control activities.}, 82 | number = {N92-21468}, 83 | urldate = {2021-07-26}, 84 | institution = {NASA}, 85 | author = {Sandra G. Hart and Vernol Battiste}, 86 | month = {apr}, 87 | year = {1991}, 88 | keywords = {BEHAVIORAL SCIENCES}, 89 | } 90 | 91 | @Article{jelinski_modifiable_1996, 92 | title = {The modifiable areal unit problem and implications for landscape ecology}, 93 | volume = {11}, 94 | issn = {1572-9761}, 95 | abstract = {Landscape ecologists often deal with aggregated data and multiscaled spatial phenomena. Recognizing the sensitivity of the results of spatial analyses to the definition of units for which data are collected is critical to characterizing landscapes with minimal bias and avoidance of spurious relationships. We introduce and examine the effect of data aggregation on analysis of landscape structure as exemplified through what has become known, in the statistical and geographical literature, as theModifiable Areal Unit Problem (MAUP). The MAUP applies to two separate, but interrelated, problems with spatial data analysis. The first is the “scale problem”, where the same set of areal data is aggregated into several sets of larger areal units, with each combination leading to different data values and inferences. 
The second aspect of the MAUP is the “zoning problem”, where a given set of areal units is recombined into zones that are of the same size but located differently, again resulting in variation in data values and, consequently, different conclusions. We conduct a series of spatial autocorrelation analyses based on NDVI (Normalized Difference Vegetation Index) to demonstrate how the MAUP may affect the results of landscape analysis. We conclude with a discussion of the broader-scale implications for the MAUP in landscape ecology and suggest approaches for dealing with this issue.}, 96 | language = {en}, 97 | number = {3}, 98 | urldate = {2021-07-15}, 99 | journal = {Landscape Ecology}, 100 | author = {Dennis E. Jelinski and Jianguo Wu}, 101 | month = {jun}, 102 | year = {1996}, 103 | pages = {129--140}, 104 | } 105 | 106 | @Article{chandra_multi-objective_2021, 107 | title = {A multi-objective genetic algorithm approach to design optimal zoning systems for freight transportation planning}, 108 | volume = {92}, 109 | issn = {09666923}, 110 | language = {en}, 111 | urldate = {2021-07-15}, 112 | journal = {Journal of Transport Geography}, 113 | author = {Aitichya Chandra and M.N. Sharath and Agnivesh Pani and Prasanta K. Sahu}, 114 | month = {apr}, 115 | year = {2021}, 116 | pages = {103037}, 117 | } 118 | 119 | @Article{baker_zoning_1925, 120 | title = {Zoning {Legislation}}, 121 | volume = {11}, 122 | journal = {Cornell Law Review}, 123 | author = {Newman F. Baker}, 124 | year = {1925}, 125 | note = {Publisher: HeinOnline}, 126 | keywords = {⛔ No DOI found}, 127 | pages = {164}, 128 | } 129 | 130 | @Article{bryant_worcestershire_2007, 131 | title = {The {Worcestershire} {Tithe} and {Enclosure} {Map} {Project}: creating a research resource}, 132 | volume = {29}, 133 | shorttitle = {The {Worcestershire} {Tithe} and {Enclosure} {Map} {Project}}, 134 | number = {1}, 135 | journal = {Landscape History}, 136 | author = {Victoria Bryant and Maggi Noke}, 137 | year = {2007}, 138 | pages = {89--92}, 139 | } 140 | 141 | @Article{openshaw_optimal_1977, 142 | title = {Optimal zoning systems for spatial interaction models}, 143 | volume = {9}, 144 | issn = {0308-518X}, 145 | number = {2}, 146 | journal = {Environment and Planning A}, 147 | author = {S Openshaw}, 148 | year = {1977}, 149 | pages = {169--184}, 150 | } 151 | 152 | @Article{dorling_area_2011, 153 | title = {Area {Cartograms}: {Their} {Use} and {Creation}}, 154 | issn = {0 306-6142}, 155 | abstract = {This book provides an introduction to the concept of cartograms, the various methods of creating them, and some common applications. 
It contains a large number of colour figures to visually demonstrate the power of cartograms, drawn from many different sources.}, 156 | journal = {The Map Reader: Theories of Mapping Practice and Cartographic Representation}, 157 | author = {Daniel Dorling}, 158 | year = {2011}, 159 | keywords = {\#nosource, Area cartograms - their use and creation, Circular cartograms, Competing cartogram algorithms, Exploring popularity of technique in political car, Political cartography, Term physical accretion model - and constructing c}, 160 | pages = {252--260}, 161 | } 162 | 163 | @Article{lovelace_propensity_2017, 164 | title = {The {Propensity} to {Cycle} {Tool}: {An} open source online system for sustainable transport planning}, 165 | volume = {10}, 166 | copyright = {Copyright (c) 2016 Robin Lovelace, Anna Goodman, Rachel Aldred, Nikolai Berkoff, Ali Abbas, James Woodcock}, 167 | issn = {1938-7849}, 168 | shorttitle = {The {Propensity} to {Cycle} {Tool}}, 169 | abstract = {Getting people cycling is an increasingly common objective in transport planning institutions worldwide. A growing evidence base indicates that high quality infrastructure can boost local cycling rates. Yet for infrastructure and other cycling measures to be effective, it is important to intervene in the right places, such as along ‘desire lines’ of high latent demand. This creates the need for tools and methods to help answer the question ‘where to build?’. Following a brief review of the policy and research context related to this question, this paper describes the design, features and potential applications of such a tool. The Propensity to Cycle Tool (PCT) is an online, interactive planning support system that was initially developed to explore and map cycling potential across England (see www.pct.bike). Based on origin-destination data it models cycling levels at area, desire line, route and route network levels, for current levels of cycling, and for scenario-based ‘cycling futures.’ Four scenarios are presented, including ‘Go Dutch’ and ‘Ebikes,’ which explore what would happen if English people had the same propensity to cycle as Dutch people and the potential impact of electric cycles on cycling uptake. The cost effectiveness of investment depends not only on the number of additional trips cycled, but on wider impacts such as health and carbon benefits. The PCT reports these at area, desire line, and route level for each scenario. The PCT is open source, facilitating the creation of scenarios and deployment in new contexts. We conclude that the PCT illustrates the potential of online tools to inform transport decisions and raises the wider issue of how models should be used in transport planning.}, 170 | language = {en}, 171 | number = {1}, 172 | urldate = {2017-06-01}, 173 | journal = {Journal of Transport and Land Use}, 174 | author = {Robin Lovelace and Anna Goodman and Rachel Aldred and Nikolai Berkoff and Ali Abbas and James Woodcock}, 175 | month = {jan}, 176 | year = {2017}, 177 | keywords = {Cycling, Participatory, Planning, modelling}, 178 | } 179 | 180 | @Article{wills_persistence_2016, 181 | title = {Persistence of {Neighborhood} {Demographic} {Influences} over {Long} {Phylogenetic} {Distances} {May} {Help} {Drive} {Post}-{Speciation} {Adaptation} in {Tropical} {Forests}}, 182 | volume = {11}, 183 | issn = {1932-6203}, 184 | abstract = {Studies of forest dynamics plots (FDPs) have revealed a variety of negative density-dependent (NDD) demographic interactions, especially among conspecific trees. 
These interactions can affect growth rate, recruitment and mortality, and they play a central role in the maintenance of species diversity in these complex ecosystems. Here we use an equal area annulus (EAA) point-pattern method to comprehensively analyze data from two tropical FDPs, Barro Colorado Island in Panama and Sinharaja in Sri Lanka. We show that these NDD interactions also influence the continued evolutionary diversification of even distantly related tree species in these FDPs. We examine the details of a wide range of these interactions between individual trees and the trees that surround them. All these interactions, and their cumulative effects, are strongest among conspecific focal and surrounding tree species in both FDPs. They diminish in magnitude with increasing phylogenetic distance between heterospecific focal and surrounding trees, but do not disappear or change the pattern of their dependence on size, density, frequency or physical distance even among the most distantly related trees. The phylogenetic persistence of all these effects provides evidence that interactions between tree species that share an ecosystem may continue to promote adaptive divergence even after the species’ gene pools have become separated. Adaptive divergence among taxa would operate in stark contrast to an alternative possibility that has previously been suggested, that distantly related species with dispersal-limited distributions and confronted with unpredictable neighbors will tend to converge on common strategies of resource use. In addition, we have also uncovered a positive density-dependent effect: growth rates of large trees are boosted in the presence of a smaller basal area of surrounding trees. We also show that many of the NDD interactions switch sign rapidly as focal trees grow in size, and that their cumulative effect can strongly influence the distributions and species composition of the trees that surround the focal trees during the focal trees’ lifetimes.}, 185 | language = {en}, 186 | number = {6}, 187 | urldate = {2021-04-10}, 188 | journal = {PLOS ONE}, 189 | author = {Christopher Wills and Kyle E. Harms and Thorsten Wiegand and Ruwan Punchi-Manage and Gregory S. Gilbert and David Erickson and W. John Kress and Stephen P. Hubbell and C. V. Savitri Gunatilleke and I. A. U. Nimal Gunatilleke}, 190 | month = {jun}, 191 | year = {2016}, 192 | note = {Publisher: Public Library of Science}, 193 | keywords = {Autocorrelation, Census, Death rates, Phylogenetic analysis, Phylogenetics, Seedlings, Species interactions, Trees}, 194 | pages = {e0156913}, 195 | } 196 | 197 | @Article{paez_exploring_2006, 198 | title = {Exploring contextual variations in land use and transport analysis using a probit model with geographical weights}, 199 | volume = {14}, 200 | issn = {0966-6923}, 201 | abstract = {A majority of statistical methods used in the analysis of land use and transportation systems implicitly carry the assumption that relationships are constant across locations or individuals, thus ignoring contextual variation due to geographical or socio-economic heterogeneity. In some cases, where the assumption of constant relationships is questionable, market segmentation procedures are used to study varying relationships. More recently, methodological developments, and a greater awareness of the importance of geography, have led to increasingly sophisticated ways to explore varying relationships in land use and transportation modeling. 
The objective of this paper is to propose a simple probit model to explore contextual variability in continuous-space. Some conceptual and technical issues are discussed, and an example is presented that reanalyzes land use change using data from California’s BART system. The results of the example suggest that considerable parametric variation exists across geographical space, and thus underlines the relevance of contextual effects.}, 202 | language = {en}, 203 | number = {3}, 204 | urldate = {2021-03-08}, 205 | journal = {Journal of Transport Geography}, 206 | author = {Antonio P{\a'a}ez}, 207 | month = {may}, 208 | year = {2006}, 209 | keywords = {Geographically weighted regression, Heteroscedastic probit model, Maximum likelihood estimation, Transportation and land use analysis, Variance functions}, 210 | pages = {167--176}, 211 | } 212 | 213 | @Article{teulade-denantes_routes_2015, 214 | title = {Routes visualization: {Automated} placement of multiple route symbols along a physical network infrastructure}, 215 | volume = {2015}, 216 | issn = {1948-660X}, 217 | shorttitle = {Routes visualization}, 218 | abstract = {Routes visualization: Automated placement of multiple route symbols along a physical network infrastructure}, 219 | number = {11}, 220 | urldate = {2021-03-08}, 221 | journal = {Journal of Spatial Information Science}, 222 | author = {Jules Teulade-Denantes and Adrien Maudet and C{\a'e}cile Duch{\^e}ne}, 223 | month = {dec}, 224 | year = {2015}, 225 | pages = {53--79}, 226 | } 227 | 228 | @Article{anderson_augmented_2018, 229 | title = {Augmented space planning: {Using} procedural generation to automate desk layouts}, 230 | volume = {16}, 231 | issn = {1478-0771}, 232 | shorttitle = {Augmented space planning}, 233 | abstract = {We developed a suite of procedural algorithms for space planning in commercial offices. These algorithms were benchmarked against 13,000 actual offices designed by human architects. The algorithm performed as well as an architect on 77\% of offices, and achieved a higher capacity in an additional 6\%, all while following a set of space standards. If the algorithm used the space standards the same way as an architect (a more relaxed interpretation), the algorithm achieved a 97\% match rate, which means that the algorithm completed this design task as well as a designer and in a shorter time. The benchmarking of a layout algorithm against thousands of existing designs is a novel contribution of this article, and we argue that it might be a first step toward a more comprehensive method to automate parts of the office layout process.}, 234 | language = {en}, 235 | number = {2}, 236 | urldate = {2021-03-08}, 237 | journal = {International Journal of Architectural Computing}, 238 | author = {Carl Anderson and Carlo Bailey and Andrew Heumann and Daniel Davis}, 239 | month = {jun}, 240 | year = {2018}, 241 | note = {Publisher: SAGE Publications}, 242 | keywords = {Office design, automation, design augmentation, desk layouts, office layout, space planning}, 243 | pages = {164--177}, 244 | } 245 | 246 | @Article{onrust_ecologically_2017, 247 | title = {Ecologically {Sound} {Procedural} {Generation} of {Natural} {Environments}}, 248 | volume = {2017}, 249 | issn = {1687-7047}, 250 | abstract = {Current techniques for the creation and exploration of virtual worlds are largely unable to generate sound natural environments from ecological data and to provide interactive web-based visualizations of such detailed environments. 
We tackle this challenge and propose a novel framework that (i) explores the advantages of landscape maps and ecological statistical data, translating them to an ecologically sound plant distribution, and (ii) creates a visually convincing 3D representation of the natural environment suitable for its interactive visualization over the web. Our vegetation model improves techniques from procedural ecosystem generation and neutral landscape modeling. It is able to generate diverse ecological sound plant distributions directly from landscape maps with statistical ecological data. Our visualization model integrates existing level of detail and illumination techniques to achieve interactive frame rates and improve realism. We validated with ecology experts the outcome of our framework using two case studies and concluded that it provides convincing interactive visualizations of large natural environments.}, 251 | language = {en}, 252 | urldate = {2021-03-08}, 253 | journal = {International Journal of Computer Games Technology}, 254 | author = {Benny Onrust and Rafael Bidarra and Robert Rooseboom and Johan {van de Koppel}}, 255 | month = {may}, 256 | year = {2017}, 257 | note = {Publisher: Hindawi}, 258 | pages = {e7057141}, 259 | } 260 | 261 | @Article{mustafa_procedural_2020, 262 | title = {Procedural generation of flood-sensitive urban layouts}, 263 | volume = {47}, 264 | issn = {2399-8083}, 265 | abstract = {Aside from modeling geometric shape, three-dimensional (3D) urban procedural modeling has shown its value in understanding, predicting and/or controlling effects of shape on design and urban planning. In this paper, instead of the construction of flood resistant measures, we create a procedural generation system for designing urban layouts that passively reduce water depth during a flooding scenario. Our tool enables exploring designs that passively lower flood depth everywhere or mostly in chosen key areas. Our approach tightly integrates a hydraulic model and a parameterized urban generation system with an optimization engine so as to find the least cost modification to an initial urban layout design. Further, due to the computational cost of a fluid simulation, we train neural networks to assist with accelerating the design process. We have applied our system to several real-world locations and have obtained improved 3D urban models in just a few seconds.}, 266 | language = {en}, 267 | number = {5}, 268 | urldate = {2021-03-08}, 269 | journal = {Environment and Planning B: Urban Analytics and City Science}, 270 | author = {Ahmed Mustafa and Xiao {Wei Zhang} and Daniel G Aliaga and Martin Bruwier and Gen Nishida and Benjamin Dewals and S{\a'e}bastian Erpicum and Pierre Archambeau and Michel Pirotton and Jacques Teller}, 271 | month = {jun}, 272 | year = {2020}, 273 | note = {Publisher: SAGE Publications Ltd STM}, 274 | keywords = {Inverse procedural modeling, Markov Chain Monte Carlo, neural network, urban flooding, urban layout}, 275 | pages = {889--911}, 276 | } 277 | 278 | @Article{lin_cartographic_2017, 279 | title = {A cartographic modeling approach to isopleth mapping}, 280 | volume = {31}, 281 | issn = {1365-8816}, 282 | abstract = {Isopleth maps depict different types of standardized data densities, general ratios/rates, and proportions/percentages. In this study, we describe different paths each type of standardized data takes to construct isoplethic surfaces in a cartographic modeling framework. 
As suggested in previous research, an area-based pycnophylactic interpolator is preferred to point interpolators in isopleth mapping not only because it preserves the total volume in each aggregation unit but also because it is non-parametric and is able to incorporate ancillary data to increase the accuracy of a surface representation. Here, a general pycnophylactic method is used to generate isopleth maps of density surfaces, but a hybrid approach is proposed to address the small denominator problem that arises when mapping ratio/rate and proportion/percentage surfaces. Finally, we propose a value-by-perspective height mapping procedure to resolve the visual equalization problem associated with ratio/rate and proportion/percentage surfaces that enable one to distinguish among high rate/large denominator, high rate/small denominator, low rate/large denominator, and low rate/small denominator regions of the surface.}, 283 | number = {5}, 284 | urldate = {2021-03-08}, 285 | journal = {International Journal of Geographical Information Science}, 286 | author = {Jie Lin and Dean M. Hanink and Robert G. Cromley}, 287 | month = {may}, 288 | year = {2017}, 289 | keywords = {Cartographic modeling, isopleth mapping, pycnophylactic interpolation}, 290 | pages = {849--866}, 291 | } 292 | 293 | @Article{galin_procedural_2010, 294 | title = {Procedural {Generation} of {Roads}}, 295 | volume = {29}, 296 | copyright = {© 2010 The Author(s) Journal compilation © 2010 The Eurographics Association and Blackwell Publishing Ltd.}, 297 | issn = {1467-8659}, 298 | abstract = {In this paper, we propose an automatic method for generating roads based on a weighted anisotropic shortest path algorithm. Given an input scene, we automatically create a path connecting an initial and a final point. The trajectory of the road minimizes a cost function that takes into account the different parameters of the scene including the slope of the terrain, natural obstacles such as rivers, lakes, mountains and forests. The road is generated by excavating the terrain along the path and instantiating generic parameterized models.}, 299 | language = {en}, 300 | number = {2}, 301 | urldate = {2021-03-08}, 302 | journal = {Computer Graphics Forum}, 303 | author = {E. Galin and A. Peytavie and N. Mar{\a'e}chal and E. Gu{\a'e}rin}, 304 | year = {2010}, 305 | keywords = {Computer Graphics: Three-Dimensional Graphics and Realism, Procedural modeling, discrete anisotropic shortest path, road generation}, 306 | pages = {429--438}, 307 | } 308 | 309 | @Article{hesselbarth_landscapemetrics_2019, 310 | title = {landscapemetrics: an open-source {R} tool to calculate landscape metrics}, 311 | volume = {42}, 312 | shorttitle = {landscapemetrics}, 313 | number = {10}, 314 | journal = {Ecography}, 315 | author = {Maximilian HK Hesselbarth and Marco Sciaini and Kimberly A. With and Kerstin Wiegand and Jakub Nowosad}, 316 | year = {2019}, 317 | note = {Publisher: Wiley Online Library}, 318 | pages = {1648--1657}, 319 | } 320 | 321 | @Article{long_modeling_2018, 322 | title = {Modeling movement probabilities within heterogeneous spatial fields}, 323 | volume = {2018}, 324 | number = {16}, 325 | journal = {Journal of Spatial Information Science}, 326 | author = {Jed A. 
Long}, 327 | year = {2018}, 328 | keywords = {⛔ No DOI found}, 329 | pages = {85--116}, 330 | } 331 | 332 | @Article{ciglic_evaluating_2019, 333 | title = {Evaluating existing manually constructed natural landscape classification with a machine learning-based approach}, 334 | volume = {2019}, 335 | number = {18}, 336 | journal = {Journal of Spatial Information Science}, 337 | author = {Rok Ciglic and Erik Strumbelj and Rok Cesnovar and Mauro Hrvatin and Drago Perko}, 338 | year = {2019}, 339 | keywords = {⛔ No DOI found}, 340 | pages = {31--56}, 341 | } 342 | 343 | @Article{hirzel_which_2002, 344 | title = {Which is the optimal sampling strategy for habitat suitability modelling}, 345 | volume = {157}, 346 | issn = {0304-3800}, 347 | abstract = {Designing an efficient sampling strategy is of crucial importance for habitat suitability modelling. This paper compares four such strategies, namely, ‘random’, ‘regular’, ‘proportional-stratified’ and ‘equal-stratified’—to investigate (1) how they affect prediction accuracy and (2) how sensitive they are to sample size. In order to compare them, a virtual species approach (Ecol. Model. 145 (2001) 111) in a real landscape, based on reliable data, was chosen. The distribution of the virtual species was sampled 300 times using each of the four strategies in four sample sizes. The sampled data were then fed into a GLM to make two types of prediction: (1) habitat suitability and (2) presence/absence. Comparing the predictions to the known distribution of the virtual species allows model accuracy to be assessed. Habitat suitability predictions were assessed by Pearson's correlation coefficient and presence/absence predictions by Cohen's κ agreement coefficient. The results show the ‘regular’ and ‘equal-stratified’ sampling strategies to be the most accurate and most robust. We propose the following characteristics to improve sample design: (1) increase sample size, (2) prefer systematic to random sampling and (3) include environmental information in the design.}, 348 | language = {en}, 349 | number = {2}, 350 | urldate = {2021-03-16}, 351 | journal = {Ecological Modelling}, 352 | author = {Alexandre Hirzel and Antoine Guisan}, 353 | month = {nov}, 354 | year = {2002}, 355 | keywords = {Bootstrap statistics, GLM, Logistic model, Sampling design, Simulations, Virtual species}, 356 | pages = {331--341}, 357 | } 358 | 359 | @Article{thomson_gridsample_2017, 360 | title = {{GridSample}: an {R} package to generate household survey primary sampling units ({PSUs}) from gridded population data}, 361 | volume = {16}, 362 | issn = {1476-072X}, 363 | shorttitle = {{GridSample}}, 364 | abstract = {Household survey data are collected by governments, international organizations, and companies to prioritize policies and allocate billions of dollars. Surveys are typically selected from recent census data; however, census data are often outdated or inaccurate. This paper describes how gridded population data might instead be used as a sample frame, and introduces the R GridSample algorithm for selecting primary sampling units (PSU) for complex household surveys with gridded population data. With a gridded population dataset and geographic boundary of the study area, GridSample allows a two-step process to sample “seed” cells with probability proportionate to estimated population size, then “grows” PSUs until a minimum population is achieved in each PSU. The algorithm permits stratification and oversampling of urban or rural areas. 
The approximately uniform size and shape of grid cells allows for spatial oversampling, not possible in typical surveys, possibly improving small area estimates with survey results.}, 365 | number = {1}, 366 | urldate = {2021-03-16}, 367 | journal = {International Journal of Health Geographics}, 368 | author = {Dana R. Thomson and Forrest R. Stevens and Nick W. Ruktanonchai and Andrew J. Tatem and Marcia C. Castro}, 369 | month = {jul}, 370 | year = {2017}, 371 | keywords = {Cluster sample, Cluster survey, Multi-stage}, 372 | pages = {25}, 373 | } 374 | 375 | @Article{holmes_problems_1967, 376 | title = {Problems in {Location} {Sampling}}, 377 | volume = {57}, 378 | issn = {0004-5608}, 379 | abstract = {In geographical and related research uncertainty and error have arisen in the selection, application, and interpretation of designs in plane sampling largely through a failure to differentiate between area sampling and location sampling procedures. A survey of some commonly used area sampling designs indicates the frequency and extent of this error. Location sampling has remained a poorly understood technique through its confusion with area sampling, and there has been a consequent failure to grapple with the problems of stratifying by location when the primary sampling items are of varying areal extent or are irregularly spaced. An objective method of stratifying by location employing minimal aggregation is described and is used as a basis for both systematic and random samples. Tests on locationally stratified farm sample designs, based upon certain known and hypothetical farm characteristics in one New South Wales shire, indicate that increases in sampling precision obtained through locational stratification are directly related to the levels of spatial segregation in the items being sampled.}, 380 | number = {4}, 381 | urldate = {2021-03-16}, 382 | journal = {Annals of the Association of American Geographers}, 383 | author = {John Holmes}, 384 | month = {dec}, 385 | year = {1967}, 386 | pages = {757--780}, 387 | } 388 | 389 | @Article{boeing_spatial_2021, 390 | title = {Spatial information and the legibility of urban form: {Big} data in urban morphology}, 391 | volume = {56}, 392 | issn = {0268-4012}, 393 | shorttitle = {Spatial information and the legibility of urban form}, 394 | abstract = {Urban planning and morphology have relied on analytical cartography and visual communication tools for centuries to illustrate spatial patterns, conceptualize proposed designs, compare alternatives, and engage the public. Classic urban form visualizations – from Giambattista Nolli’s ichnographic maps of Rome to Allan Jacobs’s figure-ground diagrams of city streets – have compressed physical urban complexity into easily comprehensible information artifacts. Today we can enhance these traditional workflows through the Smart Cities paradigm of understanding cities via user-generated content and harvested data in an information management context. New spatial technology platforms and big data offer new lenses to understand, evaluate, monitor, and manage urban form and evolution. This paper builds on the theoretical framework of visual cultures in urban planning and morphology to introduce and situate computational data science processes for exploring urban fabric patterns and spatial order. 
It demonstrates these workflows with OSMnx and data from OpenStreetMap, a collaborative spatial information system and mapping platform, to examine street network patterns, orientations, and configurations in different study sites around the world, considering what these reveal about the urban fabric. The age of ubiquitous urban data and computational toolkits opens up a new era of worldwide urban form analysis from integrated quantitative and qualitative perspectives.}, 395 | language = {en}, 396 | urldate = {2021-02-19}, 397 | journal = {International Journal of Information Management}, 398 | author = {Geoff Boeing}, 399 | month = {feb}, 400 | year = {2021}, 401 | keywords = {OpenStreetMap, Urban design, Urban form, Urban morphology, Urban planning, Visualization}, 402 | pages = {102013}, 403 | } 404 | 405 | @Article{orr_persistence_1969, 406 | title = {The {Persistence} of the {Gerrymander} in {North} {Carolina} {Congressional} {Redistricting}}, 407 | volume = {9}, 408 | issn = {1549-6929}, 409 | abstract = {One of the most important special geographic units below the state level is that established for conducting elections. The determination of one type of electoral unit, the congressional district, is a matter of considerable consequence to our federal system of government as manifested in the United States House of Representatives—“the grand depository of the democratic principle.” Yet in our supposedly democratic form of government, congressional districts have traditionally been subject to enormous population disparities and gerrymandering. Political power is rarely surrendered voluntarily. A political “rule of the game” has been that the party or interest in power apportioned and redistricted so as to stay in power. North Carolina has been no exception. Among its 21 congressional district plans over the past 180 years, including three realignments since 1960, the gerrymander had helped perpetuate the rural domination of the state’s congressional delegation, and more recently, it has been used to try to stem the rising tide of Republicanism and return Democratic incumbents to office. North Carolina therefore provides a meaningful case study as to the persistence of a tactic that has too long prevailed with the nation’s political system. GERRYMANDERING. The practice of gerrymandering actually began in Europe, but the term itself originated in 1812 in Massachusetts when Governor Elbridge Gerry carved out an electoral district that was said to resemble a salamander due to its winding shape (Fig. 1). The corruption of the two words gave American politics this descriptive term. This art of political cartography has not been confined to one group or region; both political parties in most parts of the country have practiced it. It represents the manipulation of district boundaries in order to juggle district populations for partisan advantage. However, considerable misconception exists concerning the gerrymander. A gerrymandered district may not always be identified by its shape, in spite of the common connotation of the term. The one vote requirement, for example, can be considered an “anti-gerrymandering” development because it prevents the inequities of overpopulated and underpopulated districts which were motivated often by party strength in such districts. Yet population equality does not completely prevent gerrymandering, although it does restrict partisan maneuvering. 
The gerrymander may therefore be manifested in several forms, and this art of political abuse has become quite refined by more than a century of application in American politics. The several major gerrymandering techniques developed from this experience are summarized as follows: (1) 1. Stacked Districts—This type of gerrymandered district probably best fits the popular conception of the practice. The “stacked” district exhibits the grotesque shape that has inspired the many vivid district descriptions, as it winds its way across the landscape, seeking out pockets of voting strength of one party or interest in an overall area that is predominantly sympathetic to the opposition. Figure 1. The original gerrymander, as depicted in the Boston Gazette of March 26, 1812. The Massachusetts Legislature placed certain towns of Essex County into this odd-shaped senatorial district in order to concentrate the Federalist vote in as few districts as possible. At first described as a salamander, it became known as a gerrymander because Governor Elbridge Gerry signed the redistricting bill into law. Source: Ruth C. Silva, “Reapportionment and Redistricting,” Scientific American, No. 5, November 1965, p. 21. 2. Excess Votes.—A popular gerrymandering device is to concentrate the opposition’s vote in as few districts as possible so that it is squandered by “overkill.” Such planned landslides, also known as “packed” districts, insure safe constituencies for a party, but sacrifice votes that might be desperately needed in neighboring districts. Democrats are particularly susceptible to this technique because of the large concentration of traditionally Democratic voters in metropolitan areas. In the South, rural Democrats have “packed” districts against urban Democrats. 3. Wasted Votes.—This...}, 410 | number = {2}, 411 | urldate = {2021-02-07}, 412 | journal = {Southeastern Geographer}, 413 | author = {Orr, Jr., Douglas M.}, 414 | year = {1969}, 415 | note = {Publisher: The University of North Carolina Press}, 416 | pages = {39--54}, 417 | } 418 | 419 | @Article{chou_taming_2006, 420 | title = {Taming the {Gerrymander}—{Statistical} physics approach to {Political} {Districting} {Problem}}, 421 | volume = {369}, 422 | issn = {0378-4371}, 423 | abstract = {The Political Districting Problem is mapped to a q-state Potts model in which the constraints can be written as interactions between sites or external fields acting on the system. Districting into q voter districts is equivalent to finding the ground state of this q-state Potts model. We illustrate this by districting Taipei city in its 2008 Legislature Election. Statistical properties of the model are also studied.}, 424 | language = {en}, 425 | number = {2}, 426 | urldate = {2021-02-07}, 427 | journal = {Physica A: Statistical Mechanics and its Applications}, 428 | author = {Chung-I Chou and S. P. 
Li}, 429 | month = {sep}, 430 | year = {2006}, 431 | keywords = {Districting, Gerrymander, Potts model}, 432 | pages = {799--808}, 433 | } 434 | 435 | @Article{honick_pictorial_1967, 436 | title = {Pictorial {Navigation} {Displays}}, 437 | volume = {4}, 438 | issn = {0008-7041}, 439 | abstract = {A pictorial navigation display for aircraft is described, in which the aircraft's ground position and track are continuously displayed, superimposed on the projected image in colour of a topographical map stored on microfilm. The microphotographic technique developed for preparation of the map films is also described. A navigation display of this type will be incorporated in the prototype Concorde supersonic airliner.}, 440 | number = {2}, 441 | urldate = {2021-02-06}, 442 | journal = {The Cartographic Journal}, 443 | author = {K. R. Honick}, 444 | month = {dec}, 445 | year = {1967}, 446 | pages = {72--81}, 447 | } 448 | 449 | @Article{ross_dicuil_2019, 450 | title = {Dicuil (9th century) on triangular and square numbers}, 451 | volume = {34}, 452 | issn = {2637-5451}, 453 | abstract = {Dicuil was a ninth-century Irish monk who taught at the Carolingian school of Louis the Pious. He wrote a Computus or astronomical treatise in Latin in about 814–16, which contains a chapter on triangular and square numbers. Dicuil describes two methods for calculating triangular numbers: the simple method of summing the natural numbers, and the more complex method of multiplication, equivalent to the formula n(n + 1)/2. He also states that a square number is equal to twice a triangular number minus the generating number, equivalent to n² = 2[n(n + 1)/2] – n. The multiplication formula for triangular numbers was first explicitly described in about the third century AD by the Greek authors Diophantus and Iamblichus. It was also known as a solution to other mathematical problems as early as 300 BC. It reappeared in the West in the sixteenth century. Dicuil thus fills a gap in our medieval knowledge.}, 454 | number = {2}, 455 | urldate = {2021-02-06}, 456 | journal = {British Journal for the History of Mathematics}, 457 | author = {Helen Elizabeth Ross and Betty Irene Knott}, 458 | month = {may}, 459 | year = {2019}, 460 | pages = {79--94}, 461 | } 462 | 463 | @Book{openshaw_modifiable_1983, 464 | title = {The modifiable areal unit problem}, 465 | publisher = {Geo Books, Norwich, UK}, 466 | author = {Stan Openshaw}, 467 | year = {1983}, 468 | } 469 | 470 | @Article{mindell_exposure-based_2012, 471 | title = {Exposure-{Based}, ‘{Like}-for-{Like}’ {Assessment} of {Road} {Safety} by {Travel} {Mode} {Using} {Routine} {Health} {Data}}, 472 | volume = {7}, 473 | issn = {1932-6203}, 474 | abstract = {Background Official reports on modal risk have not chosen appropriate numerators and denominators to enable like-for-like comparisons. We report age- and sex-specific deaths and injury rates from equivalent incidents in England by travel mode, distance travelled and time spent travelling. Methods Hospital admissions and deaths in England 2007–2009 were obtained for relevant ICD-10 external codes for pedestrians, cyclists, and car/van drivers, by age-group and sex. Distance travelled by age-group, sex and mode in England (National Travel Survey 2007–2009 data) was converted to time spent travelling using mean trip speeds. Fatality rates were compared with age-specific Netherlands data. 
Results All-age fatalities per million hours’ use (f/mhu) varied over the same factor-of-three range for both sexes (0.15–0.45 f/mhu by mode for men, 0.09–0.31 f/mhu for women). Risks were similar for men aged 21–49 y for all three modes and for female pedestrians and drivers aged 21–69 y. Most at risk were: males 17–20 y (1.3 f/mhu (95\% CI 1.2–1.4)) for driving; males 70+ (2.2 f/mhu(1.6–3.0)) for cycling; and females 70+ (0.95 f/mhu (0.86–1.1)) for pedestrians. In general, fatality rates were substantially higher among males than females. Risks per hour for male drivers \<30 y were similar or higher than for male cyclists; for males aged 17–20 y, the risk was higher for drivers (33/Bn km (30–36), 1.3 f/mhu (1.2–1.4)) than cyclists (20/Bn km (10–37), 0.24 f/mhu (0.12–0.45)) whether using distance or time. Similar age patterns occurred for cyclists and drivers in the Netherlands. Age-sex patterns for injuries resulting in hospital admission were similar for cyclists and pedestrians but lower for drivers. Conclusions When all relevant ICD-10 codes are used, fatalities by time spent travelling vary within similar ranges for walking, cycling and driving. Risks for drivers were highest in youth and fell with age, while for pedestrians and cyclists, risks increased with age. For the young, especially males, cycling is safer than driving.}, 475 | number = {12}, 476 | urldate = {2016-03-10}, 477 | journal = {PLOS ONE}, 478 | author = {Jennifer S. Mindell and Deborah Leslie and Malcolm Wardlaw}, 479 | month = {dec}, 480 | year = {2012}, 481 | keywords = {Age groups, Children, Death rates, England, Health statistics, Hospitals, Netherlands, Roads}, 482 | pages = {e50606}, 483 | } 484 | 485 | @article{lovelace_clockboard_2022, 486 | title = {{{ClockBoard}}: {{A}} Zoning System for Urban Analysis}, 487 | shorttitle = {{{ClockBoard}}}, 488 | author = {Lovelace, Robin and Tennekes, Martijn and Carlino, Dustin}, 489 | date = {2022-06-20}, 490 | journaltitle = {Journal of Spatial Information Science}, 491 | number = {24}, 492 | pages = {63--85}, 493 | issn = {1948-660X}, 494 | doi = {10.5311/JOSIS.2022.24.172}, 495 | url = {https://josis.org/index.php/josis/article/view/172}, 496 | urldate = {2022-07-02}, 497 | abstract = {Zones are the building blocks of urban analysis. Fields ranging from demographics to transport planning routinely use zones - spatially contiguous areal units that break-up continuous space into discrete chunks - as the foundation for diverse analysis techniques. Key methods such as origin-destination analysis and choropleth mapping rely on zones with appropriate sizes, shapes and coverage. However, existing zoning systems are sub-optimal in many urban analysis contexts, for three main reasons: 1) administrative zoning systems are often based on somewhat arbitrary factors; 2) zoning systems that are evidence-based (e.g., based on equal population size) are often highly variable in size and shape, reducing their utility for inter-city comparison; and 3) official zoning systems in many places simply do not exist or are unavailable. We set out to develop a flexible, open and scalable solution to these problems. The result is the zonebuilder project (with R, Rust and Python implementations), which was used to create the ClockBoard zoning system. ClockBoard consists of 12 segments emanating from a central place and divided by concentric rings with radii that increase in line with the triangular number sequence (1, 3, 6 km etc). 
'ClockBoards' thus create a consistent visual frame of reference for monocentric cities that is reminiscent of clocks and a dartboard. This paper outlines the design and potential uses of the ClockBoard zoning system in the historical context, and discusses future avenues for research into the design and assessment of zoning systems.}, 498 | issue = {24}, 499 | langid = {english}, 500 | keywords = {modifiable area unit problem}, 501 | } 502 | -------------------------------------------------------------------------------- /zonebuilder.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | BuildType: Package 16 | PackageUseDevtools: Yes 17 | PackageInstallArgs: --no-multiarch --with-keep.source 18 | --------------------------------------------------------------------------------