├── .env.sample
├── .github
└── workflows
│ ├── ci.yml
│ └── codeql-analysis.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── QwikGeo API.postman_collection.json
├── README.md
├── docs
├── CNAME
├── docs
│ ├── analysis
│ │ ├── images
│ │ │ ├── aggregate_points_by_grids.png
│ │ │ ├── aggregate_points_by_polygons.png
│ │ │ ├── bounding_box.png
│ │ │ ├── buffer.png
│ │ │ ├── center_of_dataset.png
│ │ │ ├── center_of_each_polygon.png
│ │ │ ├── clip.png
│ │ │ ├── convex_hull.png
│ │ │ ├── dissolve.png
│ │ │ ├── dissolve_by_value.png
│ │ │ ├── find_within_distance.png
│ │ │ ├── hexagon_grids.png
│ │ │ ├── k_means_cluster.png
│ │ │ ├── select_inside.png
│ │ │ ├── select_outside.png
│ │ │ └── square_grids.png
│ │ └── index.md
│ ├── assets
│ │ └── images
│ │ │ ├── favicon.ico
│ │ │ └── qwikgeo.png
│ ├── authentication
│ │ └── index.md
│ ├── collections
│ │ └── index.md
│ ├── data
│ │ ├── cities.json
│ │ ├── state_data.csv
│ │ ├── states.geojson
│ │ ├── states.json
│ │ └── us-state-capitals.csv
│ ├── groups
│ │ └── index.md
│ ├── imports
│ │ └── index.md
│ ├── index.md
│ ├── items
│ │ └── index.md
│ ├── stylesheets
│ │ └── extra.css
│ ├── tables
│ │ └── index.md
│ └── users
│ │ └── index.md
└── mkdocs.yml
├── migrations
└── models
│ └── 0_20230825191735_init.sql
├── pyproject.toml
├── qwikgeo_api
├── authentication_handler.py
├── bins_sql.py
├── config.py
├── db.py
├── db_models.py
├── main.py
├── routers
│ ├── analysis
│ │ ├── analysis_queries.py
│ │ ├── models.py
│ │ └── router.py
│ ├── authentication
│ │ ├── models.py
│ │ └── router.py
│ ├── collections
│ │ ├── models.py
│ │ └── router.py
│ ├── imports
│ │ ├── models.py
│ │ ├── router.py
│ │ └── utilities.py
│ └── items
│ │ ├── groups
│ │ └── router.py
│ │ ├── maps
│ │ ├── models.py
│ │ └── router.py
│ │ ├── router.py
│ │ ├── tables
│ │ ├── models.py
│ │ └── router.py
│ │ └── users
│ │ ├── models.py
│ │ └── router.py
└── utilities.py
└── requirements.txt
/.env.sample:
--------------------------------------------------------------------------------
1 | DB_HOST=localhost
2 | DB_DATABASE=geoportal
3 | DB_USERNAME=postgres
4 | DB_PASSWORD=postgres
5 | DB_PORT=5432
6 | CACHE_AGE_IN_SECONDS=0
7 | MAX_FEATURES_PER_TILE=100000
8 | SECRET_KEY=asdasasfakjh324fds876921vdas7tfv1uqw76fasd87g2q
9 | GOOGLE_CLIENT_ID=asdasdas745-cj472811c26nu77fm5m98dasdasdasda1vkvk2ph8me.apps.googleusercontent.com
10 | JWT_TOKEN_EXPIRE_IN_MIUNTES=60000
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: build
2 | on:
3 | push:
4 | branches:
5 | - main
6 | jobs:
7 | deploy:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v2
11 | - uses: actions/setup-python@v2
12 | with:
13 | python-version: 3.x
14 | - run: pip install mkdocs==1.3.1
15 | - run: pip install mkdocs-material==8.4.2
16 | - run: mkdocs gh-deploy --force --clean --verbose --config-file ./docs/mkdocs.yml
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ "main" ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ "main" ]
20 | schedule:
21 | - cron: '24 1 * * 2'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 | permissions:
28 | actions: read
29 | contents: read
30 | security-events: write
31 |
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | language: [ 'python' ]
36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
37 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
38 |
39 | steps:
40 | - name: Checkout repository
41 | uses: actions/checkout@v3
42 |
43 | # Initializes the CodeQL tools for scanning.
44 | - name: Initialize CodeQL
45 | uses: github/codeql-action/init@v2
46 | with:
47 | languages: ${{ matrix.language }}
48 | # If you wish to specify custom queries, you can do so here or in a config file.
49 | # By default, queries listed here will override any specified in a config file.
50 | # Prefix the list here with "+" to use these queries and those in the config file.
51 |
52 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
53 | # queries: security-extended,security-and-quality
54 |
55 |
56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
57 | # If this step fails, then you should remove it and run the build manually (see below)
58 | - name: Autobuild
59 | uses: github/codeql-action/autobuild@v2
60 |
61 | # ℹ️ Command-line programs to run using the OS shell.
62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
63 |
64 | # If the Autobuild fails above, remove it and uncomment the following three lines.
65 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
66 |
67 | # - run: |
68 | # echo "Run, Build Application using script"
69 | # ./location_of_script_within_repo/buildscript.sh
70 |
71 | - name: Perform CodeQL Analysis
72 | uses: github/codeql-action/analyze@v2
73 | with:
74 | category: "/language:${{matrix.language}}"
75 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /cache/
2 | __pycache__/
3 | .vscode
4 | /venv/
5 | .env
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM andrejreznik/python-gdal:py3.8.2-gdal3.0.4
2 |
3 | WORKDIR /
4 |
5 | COPY ./requirements.txt /app/requirements.txt
6 |
7 | RUN pip install --no-cache-dir -r /app/requirements.txt
8 |
9 | COPY . /
10 |
11 | CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Michael Keller
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # QwikGeo API
2 |
3 | 
4 |
5 | QwikGeo API is an enterprise-scale API for a GIS portal. QwikGeo API is written in [Python](https://www.python.org/) using the [FastAPI](https://fastapi.tiangolo.com/) web framework.
6 |
7 | ---
8 |
9 | **Source Code**: https://github.com/qwikgeo/qwikgeo-api
10 |
11 | ---
12 |
13 | ## Tech Docs
14 |
15 | Docs available at [this link](https://docs.qwikgeo.com).
16 |
17 | ## Requirements
18 |
19 | QwikGeo API requires PostGIS >= 2.4.0.
20 |
21 | ## Configuration
22 |
23 | In order for the API to work, you will need to edit the .env file with the credentials of the database that will host the API.
24 |
25 | ```
26 | DB_HOST=localhost
27 | DB_DATABASE=qwikgeo
28 | DB_USERNAME=postgres
29 | DB_PASSWORD=postgres
30 | DB_PORT=5432
31 | CACHE_AGE_IN_SECONDS=0
32 | MAX_FEATURES_PER_TILE=100000
33 | SECRET_KEY=asdasasfakjh324fds876921vdas7tfv1uqw76fasd87g2q
34 | GOOGLE_CLIENT_ID=asdasdas745-cj472811c26nu77fm5m98dasdasdasda1vkvk2pscfasad.apps.googleusercontent.com
35 | JWT_TOKEN_EXPIRE_IN_MIUNTES=60000
36 | ```
37 |
38 | ## Usage
39 |
40 | ### Running Locally
41 |
42 | To run the app locally `uvicorn qwikgeo_api.main:app --reload`
43 |
44 | To run the docs locally `mkdocs serve`
45 |
46 | ### Production
47 | Build Dockerfile into a docker image to deploy to the cloud.
48 |
49 | ## Aerich Commands
50 |
51 | `aerich init -t qwikgeo_api.main.DB_CONFIG --location migrations -s .`
52 |
53 | `aerich init-db`
--------------------------------------------------------------------------------
/docs/CNAME:
--------------------------------------------------------------------------------
1 | docs.qwikgeo.com
2 | www.docs.qwikgeo.com
--------------------------------------------------------------------------------
/docs/docs/analysis/images/aggregate_points_by_grids.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/aggregate_points_by_grids.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/aggregate_points_by_polygons.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/aggregate_points_by_polygons.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/bounding_box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/bounding_box.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/buffer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/buffer.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/center_of_dataset.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/center_of_dataset.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/center_of_each_polygon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/center_of_each_polygon.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/clip.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/clip.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/convex_hull.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/convex_hull.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/dissolve.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/dissolve.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/dissolve_by_value.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/dissolve_by_value.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/find_within_distance.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/find_within_distance.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/hexagon_grids.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/hexagon_grids.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/k_means_cluster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/k_means_cluster.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/select_inside.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/select_inside.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/select_outside.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/select_outside.png
--------------------------------------------------------------------------------
/docs/docs/analysis/images/square_grids.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/analysis/images/square_grids.png
--------------------------------------------------------------------------------
/docs/docs/analysis/index.md:
--------------------------------------------------------------------------------
1 | # Analysis Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | --- | ----------- |
5 | | `GET` | `/api/v1/analysis/status/{process_id}` | [Analysis Status](#analysis-status) |
6 | | `POST` | `/api/v1/analysis/buffer` | [Buffer](#buffer) |
7 | | `POST` | `/api/v1/analysis/dissolve` | [Dissolve](#dissolve) |
8 | | `POST` | `/api/v1/analysis/dissolve_by_value` | [Dissolve By Value](#dissolve-by-value) |
9 | | `POST` | `/api/v1/analysis/square_grids` | [Square Grids](#square-grids) |
10 | | `POST` | `/api/v1/analysis/hexagon_grids` | [Hexagon Grids](#hexagon-grids) |
11 | | `POST` | `/api/v1/analysis/bounding_box` | [Bounding Box](#bounding-box) |
12 | | `POST` | `/api/v1/analysis/k_means_cluster` | [K Means Cluster](#k-means-cluster) |
13 | | `POST` | `/api/v1/analysis/center_of_each_polygon` | [Center Of Each Polygon](#center-of-each-polygon) |
14 | | `POST` | `/api/v1/analysis/center_of_dataset` | [Center Of Dataset](#center-of-dataset) |
15 | | `POST` | `/api/v1/analysis/find_within_distance` | [Find Within Distance](#find-within-distance) |
16 | | `POST` | `/api/v1/analysis/convex_hull` | [Convex Hull](#convex-hull) |
17 | | `POST` | `/api/v1/analysis/aggregate_points_by_grids` | [Aggregate Points By Grid](#aggregate-points-by-grid) |
18 | | `POST` | `/api/v1/analysis/aggregate_points_by_polygons` | [Aggregate Points By Polygons](#aggregate-points-by-polygons) |
19 | | `POST` | `/api/v1/analysis/select_inside` | [Select Inside](#select-inside) |
20 | | `POST` | `/api/v1/analysis/select_outside` | [Select Outside](#select-outside) |
21 | | `POST` | `/api/v1/analysis/clip` | [Clip](#clip) |
22 |
23 | ## Endpoint Descriptions
24 |
25 | ## Analysis Status
26 | Any time an analysis is submitted, it is given a process_id and the analysis runs in the background using [FastAPI's Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). To check the
27 | status of an analysis, you can call this endpoint with the process_id.
28 |
29 | ### Example Call
30 | ```shell
31 | https://api.qwikgeo.com/api/v1/analysis/status/472e29dc-91a8-41d3-b05f-cee34006e3f7
32 | ```
33 |
34 | ### Example Output - Still Running
35 | ```json
36 | {
37 | "status": "PENDING"
38 | }
39 | ```
40 |
41 | ### Example Output - Complete
42 | ```json
43 | {
44 | "status": "SUCCESS",
45 | "new_table_id": "shnxppipxrppsdkozuroilkubktfodibtqorhucjvxlcdrqyhh",
46 | "completion_time": "2022-07-06T19:33:17.950059",
47 | "run_time_in_seconds": 1.78599
48 | }
49 | ```
50 |
51 | ### Example Output - Error
52 | ```json
53 | {
54 | "status": "FAILURE",
55 | "error": "ERROR HERE",
56 | "completion_time": "2022-07-08T13:39:47.961389",
57 | "run_time_in_seconds": 0.040892
58 | }
59 | ```
60 |
61 | ## Buffer
62 |
63 | 
64 |
65 | ### Description
66 | Buffer a geometric table with a buffer in kilometers.
67 |
68 | Example: Buffer zip centroids by one kilometer.
69 |
70 | ### Example Input
71 | ```json
72 | {
73 | "table_id": "zip_centroids",
74 | "database": "data",
75 | "distance_in_kilometers": 1
76 | }
77 | ```
78 |
79 | ### Example Output
80 | ```json
81 | {
82 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
83 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
84 | }
85 | ```
86 |
87 | ## Dissolve
88 |
89 | 
90 |
91 | ### Description
92 | Dissolve any geometric table into one single geometry.
93 |
94 | Example: Dissolve all the US States into one single geometry.
95 |
96 | ### Example Input
97 | ```json
98 | {
99 | "table_id": "states",
100 | "database": "data"
101 | }
102 | ```
103 |
104 | ### Example Output
105 | ```json
106 | {
107 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
108 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
109 | }
110 | ```
111 |
112 | ## Dissolve By Value
113 | 
114 |
115 | ### Description
116 | Dissolve any geometric table into geometries based off a column in the table.
117 |
118 | Example: Dissolve US States based off a column in the table called `sub_region`.
119 |
120 | ### Example Input
121 | ```json
122 | {
123 | "table_id": "states",
124 | "database": "data",
125 | "column": "sub_region"
126 | }
127 | ```
128 |
129 | ### Example Output
130 | ```json
131 | {
132 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
133 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
134 | }
135 | ```
136 |
137 | ## Square Grids
138 | 
139 |
140 | ### Description
141 | Generate square grids of any size based off of a table's geometry.
142 |
143 | Example: Generate 100 kilometers square grids based off of a table containing US States.
144 |
145 | ### Example Input
146 | ```json
147 | {
148 | "table_id": "states",
149 | "database": "data",
150 | "grid_size_in_kilometers": 100
151 | }
152 | ```
153 |
154 | ### Example Output
155 | ```json
156 | {
157 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
158 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
159 | }
160 | ```
161 |
162 | ## Hexagon Grids
163 | 
164 |
165 | ### Description
166 | Generate hexagon grids of any size based off of a table's geometry.
167 |
168 | Example: Generate 100 kilometers hexagon grids based off of a table containing US States.
169 |
170 | ### Example Input
171 | ```json
172 | {
173 | "table_id": "states",
174 | "database": "data",
175 | "grid_size_in_kilometers": 100
176 | }
177 | ```
178 |
179 | ### Example Output
180 | ```json
181 | {
182 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
183 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
184 | }
185 | ```
186 |
187 | ## Bounding Box
188 | 
189 |
190 | ### Description
191 | Generate a bounding box of a table.
192 |
193 | Example: Find the bounding box of a table that contains all of the US States.
194 |
195 | ### Example Input
196 | ```json
197 | {
198 | "table_id": "states",
199 | "database": "data",
200 | }
201 | ```
202 |
203 | ### Example Output
204 | ```json
205 | {
206 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
207 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
208 | }
209 | ```
210 |
211 | ## K Means Cluster
212 | 
213 |
214 | Example: Group all US zip centroids into 5 groups based off of k means clusters.
215 |
216 | ### Description
217 | Use [K Means Clustering](https://en.wikipedia.org/wiki/K-means_clustering) to group points based on their location.
218 |
219 | ### Example Input
220 | ```json
221 | {
222 | "table_id": "zip_centroids",
223 | "database": "data",
224 | "number_of_clusters": 5
225 | }
226 | ```
227 |
228 | ### Example Output
229 | ```json
230 | {
231 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
232 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
233 | }
234 | ```
235 |
236 | ## Center Of Each Polygon
237 | 
238 |
239 | ### Description
240 | Find the center of each polygon for a given table.
241 |
242 | Example: Find the center of each US State.
243 |
244 | ### Example Input
245 | ```json
246 | {
247 | "table_id": "states",
248 | "database": "data"
249 | }
250 | ```
251 |
252 | ### Example Output
253 | ```json
254 | {
255 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
256 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
257 | }
258 | ```
259 |
260 | ## Center Of Dataset
261 | 
262 |
263 | ### Description
264 | Find the center of all geometries based off a given table.
265 |
266 | Example: Find the geometric center of a table that contains all of the US States.
267 |
268 | ### Example Input
269 | ```json
270 | {
271 | "table_id": "states",
272 | "database": "data"
273 | }
274 | ```
275 |
276 | ### Example Output
277 | ```json
278 | {
279 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
280 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
281 | }
282 | ```
283 |
284 | ## Find Within Distance
285 | 
286 |
287 | ### Description
288 | Find all geometries within a given distance from a given point.
289 |
290 | Example: Find all states within `500` kilometers of latitude `40.45` and latitude `-88.95`.
291 |
292 | ### Example Input
293 | ```json
294 | {
295 | "table_id": "states",
296 | "database": "data",
297 | "latitude": 40.45,
298 | "longitude": -88.95,
299 | "distance_in_kilometers": 500
300 | }
301 | ```
302 |
303 | ### Example Output
304 | ```json
305 | {
306 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
307 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
308 | }
309 | ```
310 |
311 | ## Convex Hull
312 | 
313 |
314 | ### Description
315 | Find the smallest convex hull around a given table.
316 |
317 | Example: Find the smallest convex hull around all the US States.
318 |
319 | ### Example Input
320 | ```json
321 | {
322 | "table_id": "states",
323 | "database": "data"
324 | }
325 | ```
326 |
327 | ### Example Output
328 | ```json
329 | {
330 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
331 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
332 | }
333 | ```
334 |
335 | ## Aggregate Points By Grid
336 | 
337 |
338 | ### Description
339 | Aggregate a table of points into grids and determine how many points are in each grid.
340 |
341 | Example: Determine how many zip centroids are in each 1000 kilometer hexagon grid.
342 |
343 | ### Example Input
344 | ```json
345 | {
346 | "table_id": "zip_centroids",
347 | "database": "data",
348 | "distance_in_kilometers": 1000,
349 | "grid_type": "hexagon"
350 | }
351 | ```
352 |
353 | ### Example Output
354 | ```json
355 | {
356 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
357 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
358 | }
359 | ```
360 |
361 | ## Aggregate Points By Polygons
362 | 
363 |
364 | ### Description
365 | Aggregate a table of points into a table of polygons and determine how many points are in each polygon.
366 |
367 | Example: Determine how many zip centroids are within each US State.
368 |
369 | ### Example Input
370 | ```json
371 | {
372 | "table_id": "zip_centroids",
373 | "database": "data",
374 | "polygons": "states"
375 | }
376 | ```
377 |
378 | ### Example Output
379 | ```json
380 | {
381 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
382 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
383 | }
384 | ```
385 |
386 | ## Select Inside
387 | 
388 |
389 | ### Description
390 | Find all geometries within a given polygon table.
391 |
392 | Example: Find all zip centroids within the US States table.
393 |
394 | ### Example Input
395 | ```json
396 | {
397 | "table_id": "zip_centroids",
398 | "database": "data",
399 | "polygons": "states"
400 | }
401 | ```
402 |
403 | ### Example Output
404 | ```json
405 | {
406 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
407 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
408 | }
409 | ```
410 |
411 | ## Select Outside
412 | 
413 |
414 | ### Description
415 | Find all geometries outside a given polygon table.
416 |
417 | Example: Find all the zip centroids outside of the table with US States.
418 |
419 | ### Example Input
420 | ```json
421 | {
422 | "table_id": "zip_centroids",
423 | "database": "data",
424 | "polygons": "states"
425 | }
426 | ```
427 |
428 | ### Example Output
429 | ```json
430 | {
431 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
432 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
433 | }
434 | ```
435 |
436 | ## Clip
437 | 
438 |
439 | ### Description
440 | Clip any geometric table based off a polygon table.
441 |
442 | Example: Clip the US States table to a large polygon.
443 |
444 | ### Example Input
445 | ```json
446 | {
447 | "table_id": "states",
448 | "database": "data",
449 | "polygons": "big_polygon"
450 | }
451 | ```
452 |
453 | ### Example Output
454 | ```json
455 | {
456 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
457 | "url": "https://api.qwikgeo.com/api/v1/analysis/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
458 | }
459 | ```
--------------------------------------------------------------------------------
/docs/docs/assets/images/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/assets/images/favicon.ico
--------------------------------------------------------------------------------
/docs/docs/assets/images/qwikgeo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qwikgeo/qwikgeo-api/e8580918dec47588c9910f9c9d7790f181882a79/docs/docs/assets/images/qwikgeo.png
--------------------------------------------------------------------------------
/docs/docs/authentication/index.md:
--------------------------------------------------------------------------------
1 | # Authentication Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | -------------------------------------------------------------------------------- | --------------------------------------|
5 | | `POST` | `https://api.qwikgeo.com/api/v1/authentication/token` | [Token](#token) |
6 | | `POST` | `https://api.qwikgeo.com/api/v1/authentication/google_token_authenticate` | [Google Token Authenticate](#google-token-authenticate) |
7 |
8 |
9 | ## Endpoint Descriptions
10 |
11 | ## Token
12 |
13 | ### Description
14 | The token endpoint allows you to receive a JWT token to authenticate with the API.
15 |
16 | Token endpoint is available at `https://api.qwikgeo.com/api/v1/authentication/token`
17 |
18 | ### Example Input
19 | ```json
20 | {
21 | "username": "mrider3",
22 | "password": "secret"
23 | }
24 | ```
25 |
26 | ### Example Output
27 | ```json
28 | {
29 | "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6M30.PJZEu9eDOBqSQTWJkNMCdV__tvuETyEVRwA5wH9Ansc",
30 | "token_type": "Bearer"
31 | }
32 | ```
33 |
34 | ## Google Token Authenticate
35 |
36 | ### Description
37 | The Google token authenticate endpoint allows you to receive a JWT token to authenticate with the API via a Google JWT Token.
38 |
39 | Google Token Authenticate endpoint is available at `https://api.qwikgeo.com/api/v1/authentication/google_token_authenticate`
40 |
41 | ### Example Input
42 | ```json
43 | {
44 | "token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6M30.PJZEu9eDOBqSQTWJkNMCdV__tvuETyEVRwA5wH9Ansc"
45 | }
46 | ```
47 |
48 | ### Example Output
49 | ```json
50 | {
51 | "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6M30.PJZEu9eDOBqSQTWJkNMCdV__tvuETyEVRwA5wH9Ansc",
52 | "token_type": "Bearer"
53 | }
54 | ```
55 |
--------------------------------------------------------------------------------
/docs/docs/data/cities.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "id": 52,
4 | "name": "Ashkāsham",
5 | "state_id": 3901,
6 | "state_code": "BDS",
7 | "state_name": "Badakhshan",
8 | "country_id": 1,
9 | "country_code": "AF",
10 | "country_name": "Afghanistan",
11 | "latitude": "36.68333000",
12 | "longitude": "71.53333000",
13 | "wikiDataId": "Q4805192"
14 | },
15 | {
16 | "id": 68,
17 | "name": "Fayzabad",
18 | "state_id": 3901,
19 | "state_code": "BDS",
20 | "state_name": "Badakhshan",
21 | "country_id": 1,
22 | "country_code": "AF",
23 | "country_name": "Afghanistan",
24 | "latitude": "37.11664000",
25 | "longitude": "70.58002000",
26 | "wikiDataId": "Q156558"
27 | },
28 | {
29 | "id": 78,
30 | "name": "Jurm",
31 | "state_id": 3901,
32 | "state_code": "BDS",
33 | "state_name": "Badakhshan",
34 | "country_id": 1,
35 | "country_code": "AF",
36 | "country_name": "Afghanistan",
37 | "latitude": "36.86477000",
38 | "longitude": "70.83421000",
39 | "wikiDataId": "Q10308323"
40 | }
41 | ]
--------------------------------------------------------------------------------
/docs/docs/data/state_data.csv:
--------------------------------------------------------------------------------
1 | State Abbr,Number of Rest Stops
2 | IL,56
3 | IN,84
4 | WI,213
5 |
--------------------------------------------------------------------------------
/docs/docs/data/us-state-capitals.csv:
--------------------------------------------------------------------------------
1 | name,description,latitude,longitude
2 | Alabama,Montgomery,32.377716,-86.300568
3 | Alaska,Juneau,58.301598,-134.420212
4 | Arizona,Phoenix,33.448143,-112.096962
5 | Arkansas,Little Rock,34.746613,-92.288986
6 | California,Sacramento,38.576668,-121.493629
7 | Colorado,Denver,39.739227,-104.984856
8 | Connecticut,Hartford,41.764046,-72.682198
9 | Delaware,Dover,39.157307,-75.519722
10 | Hawaii,Honolulu,21.307442,-157.857376
11 | Florida,Tallahassee,30.438118,-84.281296
12 | Georgia,Atlanta,33.749027,-84.388229
13 | Idaho,Boise,43.617775,-116.199722
14 | Illinois,Springfield,39.798363,-89.654961
15 | Indiana,Indianapolis,39.768623,-86.162643
16 | Iowa,Des Moines,41.591087,-93.603729
17 | Kansas,Topeka,39.048191,-95.677956
18 | Kentucky,Frankfort,38.186722,-84.875374
19 | Louisiana,Baton Rouge,30.457069,-91.187393
20 | Maine,Augusta,44.307167,-69.781693
21 | Maryland,Annapolis,38.978764,-76.490936
22 | Massachusetts,Boston,42.358162,-71.063698
23 | Michigan,Lansing,42.733635,-84.555328
24 | Minnesota,St. Paul,44.955097,-93.102211
25 | Mississippi,Jackson,32.303848,-90.182106
26 | Missouri,Jefferson City,38.579201,-92.172935
27 | Montana,Helena,46.585709,-112.018417
28 | Nebraska,Lincoln,40.808075,-96.699654
29 | Nevada,Carson City,39.163914,-119.766121
30 | New Hampshire,Concord,43.206898,-71.537994
31 | New Jersey,Trenton,40.220596,-74.769913
32 | New Mexico,Santa Fe,35.68224,-105.939728
33 | North Carolina,Raleigh,35.78043,-78.639099
34 | North Dakota,Bismarck,46.82085,-100.783318
35 | New York,Albany,42.652843,-73.757874
36 | Ohio,Columbus,39.961346,-82.999069
37 | Oklahoma,Oklahoma City,35.492207,-97.503342
38 | Oregon,Salem,44.938461,-123.030403
39 | Pennsylvania,Harrisburg,40.264378,-76.883598
40 | Rhode Island,Providence,41.830914,-71.414963
41 | South Carolina,Columbia,34.000343,-81.033211
42 | South Dakota,Pierre,44.367031,-100.346405
43 | Tennessee,Nashville,36.16581,-86.784241
44 | Texas,Austin,30.27467,-97.740349
45 | Utah,Salt Lake City,40.777477,-111.888237
46 | Vermont,Montpelier,44.262436,-72.580536
47 | Virginia,Richmond,37.538857,-77.43364
48 | Washington,Olympia,47.035805,-122.905014
49 | West Virginia,Charleston,38.336246,-81.612328
50 | Wisconsin,Madison,43.074684,-89.384445
51 | Wyoming,Cheyenne,41.140259,-104.820236
--------------------------------------------------------------------------------
/docs/docs/groups/index.md:
--------------------------------------------------------------------------------
1 | # Groups Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | --- | ----------- |
5 | | `GET` | `/api/v1/groups/` | [Groups](#groups) |
6 | | `POST` | `/api/v1/groups/` | [Create Group](#create-group) |
7 | | `GET` | `/api/v1/groups/{group_id}` | [Group](#group) |
8 | | `PUT` | `/api/v1/groups/{group_id}` | [Update Group](#update-group) |
9 | | `DELETE` | `/api/v1/groups/{group_id}` | [Delete Group](#delete-group) |
10 |
11 |
12 | ## Endpoint Descriptions
13 |
14 | ## Groups
15 |
16 | ### Description
17 | The groups endpoint returns all groups within QwikGeo.
18 |
19 | Groups endpoint is available at `https://api.qwikgeo.com/api/v1/groups`
20 |
21 | ### Example Output
22 | ```json
23 | [
24 | {
25 | "group_id": "f3ba05eb-dcce-4112-911f-147bb17ba866",
26 | "name": "New Group",
27 | "group_admins": [
28 | {
29 | "id": 1,
30 | "username": "mkeller3"
31 | }
32 | ],
33 | "group_users": [
34 | {
35 | "id": 7,
36 | "username": "mkeller3"
37 | }
38 | ]
39 | }
40 | ]
41 | ```
42 |
43 | ## Create Group
44 |
45 | ### Description
46 | The create group endpoint allows you to create a group within QwikGeo.
47 |
48 | Create group endpoint is available at `https://api.qwikgeo.com/api/v1/groups/`
49 |
50 | ### Example Input
51 | ```json
52 | {
53 | "name": "New Group 2",
54 | "group_admins": [
55 | {
56 | "id": 1,
57 | "username": "mkeller3"
58 | }
59 | ],
60 | "group_users": [
61 | {
62 | "id": 7,
63 | "username": "mkeller3"
64 | }
65 | ]
66 | }
67 | ```
68 |
69 | ### Example Output
70 | ```json
71 | {
72 | "group_id": "f3ba05eb-dcce-4112-911f-147bb17ba866",
73 | "name": "New Group 2",
74 | "group_admins": [
75 | {
76 | "id": 1,
77 | "username": "mkeller3"
78 | }
79 | ],
80 | "group_users": [
81 | {
82 | "id": 7,
83 | "username": "mkeller3"
84 | }
85 | ]
86 | }
87 | ```
88 |
89 | ## Group
90 |
91 | ### Description
92 | The group endpoint returns a group you have access to within QwikGeo.
93 |
94 | Group endpoint is available at `https://api.qwikgeo.com/api/v1/groups/{group_id}`
95 |
96 | ### Example Output
97 | ```json
98 | {
99 | "group_id": "f3ba05eb-dcce-4112-911f-147bb17ba866",
100 | "name": "New Group",
101 | "group_admins": [
102 | {
103 | "id": 1,
104 | "username": "mkeller3"
105 | }
106 | ],
107 | "group_users": [
108 | {
109 | "id": 7,
110 | "username": "mkeller3"
111 | }
112 | ]
113 | }
114 | ```
115 |
116 | ## Update Group
117 |
118 | ### Description
119 | The update group endpoint allows you to edit a group you have admin access to within QwikGeo.
120 |
121 | Update group endpoint is available at `https://api.qwikgeo.com/api/v1/groups/{group_id}`
122 |
123 | ### Example Input
124 | ```json
125 | {
126 | "group_id": "f3ba05eb-dcce-4112-911f-147bb17ba866",
127 | "name": "New Group 2",
128 | "group_admins": [
129 | {
130 | "id": 1,
131 | "username": "mkeller3"
132 | }
133 | ],
134 | "group_users": [
135 | {
136 | "id": 7,
137 | "username": "mkeller3"
138 | }
139 | ]
140 | }
141 | ```
142 |
143 | ### Example Output
144 | ```json
145 | {
146 | "group_id": "f3ba05eb-dcce-4112-911f-147bb17ba866",
147 | "name": "New Group 2",
148 | "group_admins": [
149 | {
150 | "id": 1,
151 | "username": "mkeller3"
152 | }
153 | ],
154 | "group_users": [
155 | {
156 | "id": 7,
157 | "username": "mkeller3"
158 | }
159 | ]
160 | }
161 | ```
162 |
163 | ## Delete Group
164 |
165 | ### Description
166 | The delete group endpoint allows you to delete a group you have admin access to within QwikGeo.
167 |
168 | Delete group endpoint is available at `https://api.qwikgeo.com/api/v1/groups/{group_id}`
169 |
170 | ### Example Output
171 | ```json
172 | {
173 | "status": true
174 | }
175 | ```
--------------------------------------------------------------------------------
/docs/docs/imports/index.md:
--------------------------------------------------------------------------------
1 | # Imports Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | --- | ----------- |
5 | | `GET` | `/api/v1/imports/status/{process_id}` | [Import Status](#import-status) |
6 | | `POST` | `/api/v1/imports/arcgis_service` | [ArcGIS Service](#arcgis-service) |
7 | | `POST` | `/api/v1/imports/geographic_data_from_geographic_file` | [Geographic Data From Geographic File](#geographic-data-from-geographic-file) |
8 | | `POST` | `/api/v1/imports/geographic_data_from_csv` | [Geographic Data From CSV](#geographic-data-from-csv) |
9 | | `POST` | `/api/v1/imports/point_data_from_csv` | [Point Data From CSV](#point-data-from-csv) |
10 | | `POST` | `/api/v1/imports/geographic_data_from_json_file` | [Geographic Data From Json File](#geographic-data-from-json-file) |
11 | | `POST` | `/api/v1/imports/point_data_from_json_file` | [Point Data From Json File](#point-data-from-json-file) |
12 | | `POST` | `/api/v1/imports/geographic_data_from_json_url` | [Geographic Data From Json URL](#geographic-data-from-json-url) |
13 | | `POST` | `/api/v1/imports/point_data_from_json_url` | [Point Data From Json URL](#point-data-from-json-url) |
14 | | `POST` | `/api/v1/imports/geojson_from_url` | [Geojson From URL](#geojson-from-url) |
15 |
16 |
17 | ## Endpoint Descriptions
18 |
19 | ## Import Status
20 | Any time an import is submitted it is given a process_id to have the import run in the background using [FastAPI's Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). To check the
21 | status of an import, you can call this endpoint with the process_id.
22 |
23 | ### Example Call
24 | ```shell
25 | https://api.qwikgeo.com/api/v1/imports/status/472e29dc-91a8-41d3-b05f-cee34006e3f7
26 | ```
27 |
28 | ### Example Output - Still Running
29 | ```json
30 | {
31 | "status": "PENDING"
32 | }
33 | ```
34 |
35 | ### Example Output - Complete
36 | ```json
37 | {
38 | "status": "SUCCESS",
39 | "new_table_id": "shnxppipxrppsdkozuroilkubktfodibtqorhucjvxlcdrqyhh",
40 | "completion_time": "2022-07-06T19:33:17.950059",
41 | "run_time_in_seconds": 1.78599
42 | }
43 | ```
44 |
45 | ### Example Output - Error
46 | ```json
47 | {
48 | "status": "FAILURE",
49 | "error": "ERROR HERE",
50 | "completion_time": "2022-07-08T13:39:47.961389",
51 | "run_time_in_seconds": 0.040892
52 | }
53 | ```
54 |
55 | ## ArcGIS Service
56 |
57 | ### Description
58 | Import data from any `FeatureServer` or `MapServer` that allows for geojson as an output.
59 |
60 | Example: Download a point dataset of Tennessee State Parks.
61 |
62 | ### Example Input
63 | ```json
64 | {
65 | "title": "title",
66 | "description": "description",
67 | "url": "https://services5.arcgis.com/bPacKTm9cauMXVfn/ArcGIS/rest/services/TN_State_Parks_Points/FeatureServer/0"
68 | }
69 | ```
70 |
71 | ### Example Output
72 | ```json
73 | {
74 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
75 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
76 | }
77 | ```
78 |
79 | ## Geographic Data From Geographic File
80 |
81 | ### Description
82 | Import geographic data from a file/files.
83 |
84 | Example: Import geojson from [file](/data/states.geojson).
85 |
86 | ### Example Input
87 | ```json
88 | {
89 | "title": "title",
90 | "description": "description",
91 | "files": "FILES IN MULTI PART FORM"
92 | }
93 | ```
94 |
95 | ### Example Output
96 | ```json
97 | {
98 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
99 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
100 | }
101 | ```
102 |
103 | ## Geographic Data From CSV
104 |
105 | ### Description
106 | Import a csv [file](/data/state_data.csv) and join to a map already within the database based off a column.
107 |
108 | Example: Uploading a csv with two columns `state_abbr` and `Number of Rest Stops`
109 | and joining to the `states` map based off of the `state_abbr` column.
110 |
111 | ### Example Input
112 | ```json
113 | {
114 | "title": "title",
115 | "description": "description",
116 | "map_name": "states",
117 | "map_column": "state_abbr",
118 | "map_columns": ["state_abbr"],
119 | "table_column": "state_abbr",
120 | "table_columns": ["state_abbr","Number of Rest Stops"],
121 | "files": "FILES IN MULTI PART FORM"
122 | }
123 | ```
124 |
125 | ### Example Output
126 | ```json
127 | {
128 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
129 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
130 | }
131 | ```
132 |
133 | ## Point Data From CSV
134 |
135 | ### Description
136 | Import a csv file with latitude and longitude columns into database.
137 |
138 | Example: A csv [file](/data/us-state-capitals.csv) with latitude and longitude columns for US Capitals.
139 |
140 | ### Example Input
141 | ```json
142 | {
143 | "title": "title",
144 | "description": "description",
145 | "longitude": "longitude",
146 | "latitude": "latitude",
147 | "table_columns": ["name","description","latitude","longitude"],
148 | "files": "FILES IN MULTI PART FORM"
149 | }
150 | ```
151 |
152 | ### Example Output
153 | ```json
154 | {
155 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
156 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
157 | }
158 | ```
159 |
160 | ## Geographic Data From Json File
161 |
162 | ### Description
163 | Import json from a file and join to a map already within the database based off a column.
164 |
165 | Example: Import state date from a json [file](/data/states.json).
166 |
167 | ### Example Input
168 | ```json
169 | {
170 | "title": "title",
171 | "description": "description",
172 | "map_name": "states",
173 | "map_column": "state_abbr",
174 | "map_columns": ["state_abbr"],
175 | "table_column": "code",
176 | "table_columns": ["state","slug","code","nickname"],
177 | "files": "FILES IN MULTI PART FORM"
178 | }
179 | ```
180 |
181 | ### Example Output
182 | ```json
183 | {
184 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
185 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
186 | }
187 | ```
188 |
189 | ## Point Data From Json File
190 |
191 | ### Description
192 | Import point data from a Json file with latitude and longitude columns.
193 |
194 | Example: A json [file](/data/cities.json) that contains cities for the entire world.
195 |
196 | ### Example Input
197 | ```json
198 | {
199 | "title": "title",
200 | "description": "description",
201 | "longitude": "longitude",
202 | "latitude": "latitude",
203 | "table_columns": ["id","name","latitude","longitude","state_id","state_code","state_name","country_id","country_code","country_name","wikiDataId"],
204 | "files": "FILES IN MULTI PART FORM"
205 | }
206 | ```
207 |
208 | ### Example Output
209 | ```json
210 | {
211 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
212 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
213 | }
214 | ```
215 |
216 | ## Geographic Data From Json URL
217 |
218 | ### Description
219 | Import json from a url and join to a map already within the database based off a column.
220 |
221 | Example: Import state information from a GitHub URL
222 |
223 | ### Example Input
224 | ```json
225 | {
226 | "title": "title",
227 | "description": "description",
228 | "map_column": "state_abbr",
229 | "table_column": "code",
230 | "table_columns": [
231 | "state",
232 | "slug",
233 | "code",
234 | "nickname"
235 | ],
236 | "map_name": "states",
237 | "map_columns": [
238 | "state_abbr"
239 | ],
240 | "url": "https://raw.githubusercontent.com/CivilServiceUSA/us-states/master/data/states.json"
241 | }
242 | ```
243 |
244 | ### Example Output
245 | ```json
246 | {
247 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
248 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
249 | }
250 | ```
251 |
252 | ## Point Data From Json URL
253 |
254 | ### Description
255 | Import json data from a url with latitude and longitude columns into database.
256 |
257 | Example: Import state centroids from a GitHub URL
258 |
259 | ### Example Input
260 | ```json
261 | {
262 | "title": "title",
263 | "description": "description",
264 | "url": "https://raw.githubusercontent.com/dr5hn/countries-states-cities-database/master/states.json",
265 | "longitude": "longitude",
266 | "latitude": "latitude",
267 | "table_columns": ["id","name","latitude","longitude","state_code","country_id","country_code","country_name","type"]
269 | }
270 | ```
271 |
272 | ### Example Output
273 | ```json
274 | {
275 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
276 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
277 | }
278 | ```
279 |
280 | ## Geojson From URL
281 |
282 | ### Description
283 | Import geojson from any url.
284 |
285 | Example: Input large earthquakes for the past month
286 |
287 | ### Example Input
288 | ```json
289 | {
290 | "title": "title",
291 | "description": "description",
292 | "url": "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/significant_month.geojson"
293 | }
294 | ```
295 |
296 | ### Example Output
297 | ```json
298 | {
299 | "process_id": "c8d7b8d8-3e82-4f93-b441-55a5f51c4171",
300 | "url": "https://api.qwikgeo.com/api/v1/imports/status/c8d7b8d8-3e82-4f93-b441-55a5f51c4171"
301 | }
302 | ```
--------------------------------------------------------------------------------
/docs/docs/index.md:
--------------------------------------------------------------------------------
1 | # QwikGeo API
2 |
3 | QwikGeo API is a Geospatial Platform as a Service (PaaS) that allows you to upload geospatial data into our platform to analyze, enrich, and visualize your data with others. QwikGeo API is used to serve [QwikGeo](https://app.qwikgeo.com), but can also be used via other mapping platforms/frameworks and third party api's.
4 |
5 |
6 | ## Getting Started
7 |
8 | ### 1. Create a free account with QwikGeo
9 |
10 | Using the [create user](/users/#create-user) endpoint, create an account.
11 |
12 | ### 2. Get an access token
13 | Use the [token](/authentication/#token) endpoint, to create a JWT token.
14 |
15 | ### 3. Start using the multitude of different api's QwikGeo has to offer.
16 |
17 | - Upload a dataset via the [imports api](/imports).
18 | - Perform a geospatial analysis via the [analysis api](/analysis).
19 | - Visualize a dataset as geojson or vector tiles with the [collections api](/collections).
20 |
--------------------------------------------------------------------------------
/docs/docs/items/index.md:
--------------------------------------------------------------------------------
1 | # Items Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | -------------------------------------------------------------------------------- | ------------------------------------------- |
5 | | `GET` | `https://api.qwikgeo.com/api/v1/items` | [Items](#items) |
6 | | `GET` | `https://api.qwikgeo.com/api/v1/item/{portal_id}` | [Item](#item) |
7 |
8 |
9 | ## Endpoint Descriptions
10 |
11 | ## Items
12 |
13 | ### Description
14 | The items endpoint returns a list of items you have access to within QwikGeo.
15 |
16 | Items endpoint is available at `https://api.qwikgeo.com/api/v1/items`
17 |
18 | ```shell
19 | curl https://api.qwikgeo.com/api/v1/items
20 | ```
21 |
22 | ### Example Output
23 | ```json
24 | [
25 | {
26 | "portal_id": "4c2b7906-ca54-4343-b68b-13fe1a9175af",
27 | "title": "Tennessee State Parks",
28 | "created_time": "2022-10-07T14:04:56.224550+00:00",
29 | "modified_time": "2022-10-07T14:04:56.224643+00:00",
30 | "tags": [],
31 | "description": "",
32 | "views": 3,
33 | "searchable": true,
34 | "item_type": "table",
35 | "url": null,
36 | "item_read_access_list": [
37 | {
38 | "id": 1,
39 | "name": "mkeller3"
40 | }
41 | ],
42 | "item_write_access_list": [
43 | {
44 | "id": 1,
45 | "name": "mkeller3"
46 | }
47 | ]
48 | }
49 | ]
50 | ```
51 |
52 | ## Item
53 |
54 | ### Description
55 | The item endpoint returns an item you have access to within QwikGeo.
56 |
57 | Item endpoint is available at `https://api.qwikgeo.com/api/v1/items/{portal_id}`
58 |
59 | ```shell
60 | curl https://api.qwikgeo.com/api/v1/items/{portal_id}
61 | ```
62 |
63 | ### Example Output
64 | ```json
65 | {
66 | "portal_id": "4c2b7906-ca54-4343-b68b-13fe1a9175af",
67 | "title": "Tennessee State Parks",
68 | "created_time": "2022-10-07T14:04:56.224550+00:00",
69 | "modified_time": "2022-10-07T14:04:56.224643+00:00",
70 | "tags": [],
71 | "description": "",
72 | "views": 3,
73 | "searchable": true,
74 | "item_type": "table",
75 | "url": null,
76 | "item_read_access_list": [
77 | {
78 | "id": 1,
79 | "name": "mkeller3"
80 | }
81 | ],
82 | "item_write_access_list": [
83 | {
84 | "id": 1,
85 | "name": "mkeller3"
86 | }
87 | ]
88 | }
89 | ```
--------------------------------------------------------------------------------
/docs/docs/stylesheets/extra.css:
--------------------------------------------------------------------------------
1 | /* https://github.com/squidfunk/mkdocs-material/blob/master/src/assets/stylesheets/main/_colors.scss */
2 | /* https://www.canva.com/colors/color-palettes/natural-naivete/ */
3 |
/* Override mkdocs-material color variables for every color scheme
   (":root > *" targets each [data-md-color-scheme] wrapper). */
4 | :root > * {
/* Primary theme color (dark slate) — per _colors.scss linked above,
   presumably styles the header/nav; confirm by rendering the site. */
5 | --md-primary-fg-color: #393e46;
/* Accent theme color (gold) for interactive elements. */
6 | --md-accent-fg-color: #f9bf22;
7 | }
--------------------------------------------------------------------------------
/docs/docs/tables/index.md:
--------------------------------------------------------------------------------
1 | # Tables Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | -------------------------------------------------------------------------------- | ------------------------------------------- |
5 | | `GET`    | `https://api.qwikgeo.com/api/v1/tables`                                           | [Tables](#tables)                             |
6 | | `GET` | `https://api.qwikgeo.com/api/v1/tables/{table_id}` | [Table](#table) |
7 | | `POST` | `https://api.qwikgeo.com/api/v1/tables/` | [Create Table](#create-table) |
8 | | `DELETE` | `https://api.qwikgeo.com/api/v1/tables/{table_id}` | [Delete Table](#delete-table) |
9 | | `POST` | `https://api.qwikgeo.com/api/v1/tables/{table_id}/add_column` | [Add Column](#add-column) |
10 | | `DELETE` | `https://api.qwikgeo.com/api/v1/tables/{table_id}/delete_column` | [Delete Column](#delete-column) |
11 |
12 |
13 | ## Endpoint Descriptions
14 |
15 | ## Tables
16 |
17 | ### Description
18 | The tables endpoint returns tables you have access to within QwikGeo.
19 |
20 | Tables endpoint is available at `https://api.qwikgeo.com/api/v1/tables`
21 |
22 | ### Example Output
23 | ```json
24 | [
25 | {
26 | "id": 1,
27 | "portal_id": {
28 | "portal_id": "4c2b7906-ca54-4343-b68b-13fe1a9175af",
29 | "title": "Tennessee State Parks",
30 | "created_time": "2022-10-07T14:04:56.224550+00:00",
31 | "modified_time": "2022-10-07T14:04:56.224643+00:00",
32 | "tags": [],
33 | "description": "",
34 | "views": 4,
35 | "searchable": true,
36 | "item_type": "table",
37 | "url": null,
38 | "item_read_access_list": [
39 | {
40 | "id": 1,
41 | "name": "mkeller3"
42 | }
43 | ],
44 | "item_write_access_list": [
45 | {
46 | "id": 1,
47 | "name": "mkeller3"
48 | }
49 | ]
50 | },
51 | "table_id": "vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps",
52 | "created_time": "2022-10-07T14:04:56.263222+00:00",
53 | "modified_time": "2022-10-07T14:04:56.263248+00:00"
54 | }
55 | ]
56 | ```
57 |
58 | ## Table
59 |
60 | ### Description
61 | The table endpoint returns a table you have access to within QwikGeo.
62 |
63 | Table endpoint is available at `https://api.qwikgeo.com/api/v1/tables/{table_id}`
64 |
65 | ### Example Output
66 | ```json
67 | {
68 | "id": 1,
69 | "portal_id": {
70 | "portal_id": "4c2b7906-ca54-4343-b68b-13fe1a9175af",
71 | "title": "Tennessee State Parks",
72 | "created_time": "2022-10-07T14:04:56.224550+00:00",
73 | "modified_time": "2022-10-07T14:04:56.224643+00:00",
74 | "tags": [],
75 | "description": "",
76 | "views": 4,
77 | "searchable": true,
78 | "item_type": "table",
79 | "url": null,
80 | "item_read_access_list": [
81 | {
82 | "id": 1,
83 | "name": "mkeller3"
84 | }
85 | ],
86 | "item_write_access_list": [
87 | {
88 | "id": 1,
89 | "name": "mkeller3"
90 | }
91 | ]
92 | },
93 | "table_id": "vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps",
94 | "created_time": "2022-10-07T14:04:56.263222+00:00",
95 | "modified_time": "2022-10-07T14:04:56.263248+00:00"
96 | }
97 | ```
98 |
99 | ## Add Column
100 |
101 | ### Description
102 | The add column endpoints allows you to add a new column to an existing table in the database.
103 |
104 | Add Column endpoint is available at `https://api.qwikgeo.com/api/v1/tables/{table_id}/add_column`
105 |
106 | ### Example
107 |
108 | In the example below, we are adding a column called `test` that is text for the table `vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps`.
109 |
110 | ### Example Input
111 | ```json
112 | {
113 | "column_name": "test",
114 | "column_type": "text"
115 | }
116 | ```
117 |
118 | ### Example Output
119 | ```json
120 | {
121 | "status": true
122 | }
123 |
124 | ```
125 |
126 | ## Delete Column
127 |
128 | ### Description
129 | The delete column endpoint allows you to delete a column in an existing table in the database.
130 |
131 |
132 | Delete Column endpoint is available at `https://api.qwikgeo.com/api/v1/tables/{table_id}/delete_column`
133 |
134 | ### Example
135 |
136 | In the example below, we are deleting a column called `test` from the table `vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps`.
137 |
138 | ### Example Input
139 | ```json
140 | {
141 | "column_name": "test"
142 | }
143 | ```
144 |
145 | ### Example Output
146 | ```json
147 | {
148 | "status": true
149 | }
150 | ```
151 |
152 | ## Create Table
153 |
154 | ### Description
155 | The create table endpoints allow you to create a new table inside of a database.
156 |
157 | Create Table endpoint is available at `https://api.qwikgeo.com/api/v1/tables/`
158 |
159 | ### Example
160 |
161 | In the example below, we are creating a new table called `vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps_new`. We are adding one column in the table called `postalcode`,
162 | and setting the table to have `POINT` geometry.
163 |
164 | ### Example Input
165 | ```json
166 | {
167 | "columns": [
168 | {
169 | "column_name": "postalcode",
170 | "column_type": "text"
171 | }
172 | ],
173 | "geometry_type": "POINT"
174 | }
175 | ```
176 |
177 | ### Example Output
178 | ```json
179 | {
180 | "status": true,
181 | "table_id": "vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps_new"
182 | }
183 | ```
184 |
185 | ## Delete Table
186 |
187 | ### Description
188 | The delete table endpoint allows you to delete a table within the database.
189 |
190 | Delete Table endpoint is available at `https://api.qwikgeo.com/api/v1/tables/{table_id}`
191 |
192 | ### Example
193 |
194 | In the example below, we are deleting a table called `vccvnkvhrmzsqqbbcacvjrlspfpdhbcthvjszbnfledgklxnps`.
195 |
196 | ### Example Output
197 | ```json
198 | {
199 | "status": true
200 | }
201 | ```
202 |
--------------------------------------------------------------------------------
/docs/docs/users/index.md:
--------------------------------------------------------------------------------
1 | # Users Endpoints
2 |
3 | | Method | URL | Description |
4 | | ------ | -------------------------------------------------------------------------------- | --------------------------------------|
5 | | `POST` | `https://api.qwikgeo.com/api/v1/users/` | [Create User](#create-user) |
6 | | `GET` | `https://api.qwikgeo.com/api/v1/users/me` | [User](#user) |
7 | | `PUT` | `https://api.qwikgeo.com/api/v1/users/me` | [Update User](#update-user) |
8 | | `DELETE` | `https://api.qwikgeo.com/api/v1/users/me` | [Delete User](#delete-user) |
9 | | `GET` | `https://api.qwikgeo.com/api/v1/users/` | [User Search](#user-search) |
10 |
11 | ## Endpoint Descriptions
12 |
13 | ## Create User
14 |
15 | ### Description
16 | The create user endpoint allows you to create a new user to use QwikGeo.
17 |
18 | Create user endpoint is available at `https://api.qwikgeo.com/api/v1/users/`
19 |
20 | ### Example Input
21 | ```json
22 | {
23 | "username": "johndoe",
24 | "password_hash": "secret",
25 | "first_name": "John",
26 | "last_name": "Doe",
27 | "email": "johndoe@email.com"
28 | }
29 | ```
30 |
31 | ### Example Output
32 | ```json
33 | {
34 | "id": 1,
35 | "username": "johndoe",
36 | "first_name": "John",
37 | "last_name": "Doe",
38 | "email": "johndoe@email.com",
39 | "photo_url": null,
40 | "created_at": "2022-08-19T18:44:55.415824+00:00",
41 | "modified_at": "2022-08-19T18:44:55.415846+00:00"
42 | }
43 | ```
44 |
45 | ## User
46 |
47 | ### Description
48 | The user endpoint allows you to view your user information.
49 |
50 | User endpoint is available at `https://api.qwikgeo.com/api/v1/users/me`
51 |
52 | ### Example Output
53 | ```json
54 | {
55 | "id": 1,
56 | "username": "johndoe",
57 | "first_name": "John",
58 | "last_name": "Doe",
59 | "email": "johndoe@email.com",
60 | "photo_url": null,
61 | "created_at": "2022-08-19T18:44:55.415824+00:00",
62 | "modified_at": "2022-08-19T18:44:55.415846+00:00"
63 | }
64 | ```
65 |
66 | ## Update User
67 |
68 | ### Description
69 | The update user endpoint allows you to update information about your account.
70 |
71 | Update user endpoint is available at `https://api.qwikgeo.com/api/v1/users/me`
72 |
73 | ### Example Input
74 | ```json
75 | {
76 | "username": "johndoe",
77 | "first_name": "John",
78 | "last_name": "Doe",
79 | "email": "newjohndoe@email.com"
80 | }
81 | ```
82 |
83 | ### Example Output
84 | ```json
85 | {
86 | "id": 1,
87 | "username": "johndoe",
88 | "first_name": "John",
89 | "last_name": "Doe",
90 | "email": "newjohndoe@email.com",
91 | "photo_url": null,
92 | "created_at": "2022-08-19T18:44:55.415824+00:00",
93 | "modified_at": "2022-08-19T18:44:55.415846+00:00"
94 | }
95 | ```
96 |
97 | ## Delete User
98 |
99 | ### Description
100 | The delete user endpoint allows you to delete your account.
101 |
102 | Delete user endpoint is available at `https://api.qwikgeo.com/api/v1/users/me`
103 |
104 | ### Example Output
105 | ```json
106 | {
107 | "message": "Deleted user."
108 | }
109 | ```
110 |
111 | ## User Search
112 |
113 | ### Description
114 | The user search endpoint allows you to search for users within QwikGeo.
115 |
116 | User search endpoint is available at `https://api.qwikgeo.com/api/v1/users/`
117 |
118 | ### Example
119 |
120 | Search for users whose username contains `john`.
121 |
122 | ### Example Input
123 | ```shell
124 | curl `https://api.qwikgeo.com/api/v1/users/?username=john`
125 | ```
126 |
127 | ### Example Output
128 | ```json
129 | {
130 | "users": [
131 | {
132 | "username": "johndoe"
134 | }
135 | ]
136 | }
137 | ```
--------------------------------------------------------------------------------
/docs/mkdocs.yml:
--------------------------------------------------------------------------------
# MkDocs configuration for the QwikGeo API documentation site.
1 | site_name: QwikGeo API Docs
2 | site_url: https://docs.qwikgeo.com/
3 | site_description: Tech docs for QwikGeo
4 | site_author: Michael Keller
5 |
# Repository link shown in the site header; edit_uri makes "edit this page"
# point at blob/main/docs/ in that repo.
6 | repo_url: https://github.com/qwikgeo/qwikgeo-api
7 | repo_name: QwikGeo API
8 | edit_uri: blob/main/docs/
9 |
10 | extra_css:
11 |   - stylesheets/extra.css
12 |
# mkdocs-material theme; the favicon image is reused as the header logo.
13 | theme:
14 |   name: material
15 |   logo: assets/images/favicon.ico
16 |
17 | markdown_extensions:
18 |   - toc:
# NOTE(review): bare "permalink:" parses as null, which leaves heading
# permalinks disabled; if anchors were intended, use "permalink: true" — confirm.
19 |       permalink:
20 |   - attr_list
21 |   - def_list
22 |   - pymdownx.highlight:
23 |       anchor_linenums: true
24 |   - pymdownx.inlinehilite
25 |   - pymdownx.snippets
26 |   - pymdownx.superfences
27 |
# Address used by `mkdocs serve` for local development.
28 | dev_addr: '127.0.0.1:5000'
29 |
# Google Analytics (GA4 measurement ID) via mkdocs-material's analytics hook.
30 | extra:
31 |   analytics:
32 |     provider: google
33 |     property: G-2LXJZ67GEV
/migrations/models/0_20230825191735_init.sql:
--------------------------------------------------------------------------------
1 | -- upgrade --
-- Groups: named collections of users. "group" is a reserved word in
-- PostgreSQL, hence the quoted identifier. Membership lives in the
-- "groupadmin"/"groupuser" tables that reference "group_id".
2 | CREATE TABLE IF NOT EXISTS "group" (
3 |     "group_id" UUID NOT NULL PRIMARY KEY,
4 |     "name" VARCHAR(500) NOT NULL UNIQUE
5 | );
6 | COMMENT ON TABLE "group" IS 'Model for group in database';
-- Admin members of a group; one row per (username, group) pair.
-- "group_id_id" appears to be an ORM-generated FK column name (field
-- "group_id" + "_id" suffix) — keep as-is for compatibility.
7 | CREATE TABLE IF NOT EXISTS "groupadmin" (
8 |     "id" SERIAL NOT NULL PRIMARY KEY,
9 |     "username" VARCHAR(500) NOT NULL,
10 |     "group_id_id" UUID NOT NULL REFERENCES "group" ("group_id") ON DELETE CASCADE
11 | );
-- Fixed: comment previously said 'group_user' (copy-paste from "groupuser").
12 | COMMENT ON TABLE "groupadmin" IS 'Model for group_admin in database';
-- Plain (non-admin) members of a group; one row per (username, group) pair.
13 | CREATE TABLE IF NOT EXISTS "groupuser" (
14 |     "id" SERIAL NOT NULL PRIMARY KEY,
15 |     "username" VARCHAR(500) NOT NULL,
-- NOTE(review): "group_id_id" looks like an ORM-generated FK name (field
-- "group_id" + "_id") — confirm against the ORM models before renaming.
16 |     "group_id_id" UUID NOT NULL REFERENCES "group" ("group_id") ON DELETE CASCADE
17 | );
18 | COMMENT ON TABLE "groupuser" IS 'Model for group_user in database';
-- Application accounts; "username" is the unique natural key that "item"
-- and "map" reference below.
19 | CREATE TABLE IF NOT EXISTS "user" (
20 |     "id" SERIAL NOT NULL PRIMARY KEY,
21 |     "username" VARCHAR(500) NOT NULL UNIQUE,
-- Nullable: presumably absent for externally-authenticated (e.g. Google
-- token) accounts — confirm against the authentication flow.
22 |     "password_hash" VARCHAR(300),
23 |     "first_name" VARCHAR(300) NOT NULL,
24 |     "last_name" VARCHAR(300) NOT NULL,
25 |     "photo_url" VARCHAR(1000),
26 |     "email" VARCHAR(500) NOT NULL,
27 |     "created_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
28 |     "modified_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
29 | );
30 | COMMENT ON TABLE "user" IS 'Model for user in database';
-- Portal items (tables, maps, ...); "portal_id" is referenced by the
-- read/write access-list tables and by "map". Rows are owned by a user
-- and cascade-delete with that user.
31 | CREATE TABLE IF NOT EXISTS "item" (
32 |     "portal_id" UUID NOT NULL PRIMARY KEY,
33 |     "title" VARCHAR(500) NOT NULL,
34 |     "created_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
35 |     "modified_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
36 |     "tags" JSONB NOT NULL,
37 |     "description" TEXT NOT NULL,
38 |     "views" INT NOT NULL,
39 |     "searchable" BOOL NOT NULL DEFAULT True,
40 |     "item_type" TEXT NOT NULL,
41 |     "url" TEXT,
42 |     "user_id" VARCHAR(500) NOT NULL REFERENCES "user" ("username") ON DELETE CASCADE
43 | );
44 | COMMENT ON TABLE "item" IS 'Model for item in database';
-- Users granted read access to an item; cascade-deletes with the item.
45 | CREATE TABLE IF NOT EXISTS "itemreadaccesslist" (
46 |     "id" SERIAL NOT NULL PRIMARY KEY,
47 |     "name" VARCHAR(500) NOT NULL,
48 |     "portal_id_id" UUID NOT NULL REFERENCES "item" ("portal_id") ON DELETE CASCADE
49 | );
50 | COMMENT ON TABLE "itemreadaccesslist" IS 'Model for item_read_access_list in database';
51 | CREATE TABLE IF NOT EXISTS "itemwriteaccesslist" (
52 | "id" SERIAL NOT NULL PRIMARY KEY,
53 | "name" VARCHAR(500) NOT NULL,
54 | "portal_id_id" UUID NOT NULL REFERENCES "item" ("portal_id") ON DELETE CASCADE
55 | );
56 | COMMENT ON TABLE "itemwriteaccesslist" IS 'Model for item_read_access_list in database';
57 | CREATE TABLE IF NOT EXISTS "map" (
58 | "map_id" UUID NOT NULL PRIMARY KEY,
59 | "created_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
60 | "modified_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
61 | "pitch" INT NOT NULL DEFAULT 0,
62 | "bearing" INT NOT NULL DEFAULT 0,
63 | "basemap" VARCHAR(50) NOT NULL,
64 | "bounding_box" JSONB NOT NULL,
65 | "item_id" UUID NOT NULL REFERENCES "item" ("portal_id") ON DELETE CASCADE,
66 | "user_id" VARCHAR(500) NOT NULL REFERENCES "user" ("username") ON DELETE CASCADE
67 | );
68 | COMMENT ON TABLE "map" IS 'Model for map in database';
69 | CREATE TABLE IF NOT EXISTS "layer" (
70 | "id" SERIAL NOT NULL PRIMARY KEY,
71 | "layer_id" VARCHAR(1000) NOT NULL,
72 | "title" VARCHAR(500) NOT NULL,
73 | "description" VARCHAR(500) NOT NULL,
74 | "map_type" VARCHAR(50) NOT NULL,
75 | "mapbox_name" VARCHAR(50) NOT NULL,
76 | "geometry_type" VARCHAR(50) NOT NULL,
77 | "style" JSONB,
78 | "paint" JSONB,
79 | "layout" JSONB,
80 | "fill_paint" JSONB,
81 | "border_paint" JSONB,
82 | "bounding_box" JSONB NOT NULL,
83 | "map_id" UUID NOT NULL REFERENCES "map" ("map_id") ON DELETE CASCADE
84 | );
85 | COMMENT ON TABLE "layer" IS 'Model for layer in database';
86 | CREATE TABLE IF NOT EXISTS "table" (
87 | "table_id" VARCHAR(50) NOT NULL PRIMARY KEY,
88 | "created_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
89 | "modified_time" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
90 | "item_id" UUID NOT NULL REFERENCES "item" ("portal_id") ON DELETE CASCADE,
91 | "user_id" VARCHAR(500) NOT NULL REFERENCES "user" ("username") ON DELETE CASCADE
92 | );
93 | COMMENT ON TABLE "table" IS 'Model for table in database';
94 | CREATE TABLE IF NOT EXISTS "aerich" (
95 | "id" SERIAL NOT NULL PRIMARY KEY,
96 | "version" VARCHAR(255) NOT NULL,
97 | "app" VARCHAR(100) NOT NULL,
98 | "content" JSONB NOT NULL
99 | );
100 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
# Configuration for aerich, the Tortoise ORM migration tool.
[tool.aerich]
# Dotted path to the Tortoise config dict used to generate migrations.
tortoise_orm = "qwikgeo_api.main.DB_CONFIG"
# Directory where migration files are written (see /migrations).
location = "migrations"
src_folder = "./."
5 |
--------------------------------------------------------------------------------
/qwikgeo_api/authentication_handler.py:
--------------------------------------------------------------------------------
1 | from fastapi import Request, HTTPException
2 | from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
3 | import jwt
4 |
5 | from qwikgeo_api import config
6 |
class JWTBearer(HTTPBearer):
    """FastAPI security dependency that accepts an HS256 JWT either as an
    ``Authorization: Bearer`` header or as an ``api_key`` query parameter
    and resolves it to the token's ``username`` claim.
    """

    def __init__(self, auto_error: bool = False):
        # auto_error=False makes the parent return None (instead of raising)
        # when the Authorization header is missing, so the query-parameter
        # fallback below gets a chance to run.
        super().__init__(auto_error=auto_error)

    async def __call__(self, request: Request) -> str:
        """Validate the request's JWT and return its ``username`` claim.

        Raises:
            HTTPException: 403 when the token is absent, malformed, expired,
                or presented with a non-Bearer scheme.
        """
        url_path_credentials = self.verify_api_key_param(request)
        header_credentials: HTTPAuthorizationCredentials = await super().__call__(request)

        if url_path_credentials:
            token = request.query_params['api_key']
            if not self.verify_jwt(token):
                raise HTTPException(status_code=403, detail="Invalid token or expired token.")
            user = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
            return user['username']

        if header_credentials:
            if not header_credentials.scheme == "Bearer":
                raise HTTPException(status_code=403, detail="Invalid authentication scheme.")
            if not self.verify_jwt(header_credentials.credentials):
                raise HTTPException(status_code=403, detail="Invalid token or expired token.")
            user = jwt.decode(header_credentials.credentials, config.SECRET_KEY, algorithms=["HS256"])
            return user['username']

        raise HTTPException(status_code=403, detail="Invalid authorization code.")

    def verify_api_key_param(self, request: Request) -> "dict | None":
        """Return pseudo-credentials built from the ``api_key`` query
        parameter, or None when the parameter is absent.

        Fixed: the return annotation previously claimed ``bool``; this
        method has always returned a dict or None.
        """
        if 'api_key' in request.query_params:
            return {
                "scheme": "Bearer",
                "credentials": request.query_params['api_key']
            }
        return None

    def verify_jwt(self, jwt_token: str) -> bool:
        """Return True when the token decodes and validates against
        ``config.SECRET_KEY`` with HS256."""
        try:
            jwt.decode(jwt_token, config.SECRET_KEY, algorithms=["HS256"])
        except jwt.PyJWTError:
            # Narrowed from a bare ``except`` so only JWT validation
            # failures are treated as "invalid token"; unrelated errors
            # (e.g. misconfiguration) now propagate instead of being hidden.
            return False
        return True
--------------------------------------------------------------------------------
/qwikgeo_api/bins_sql.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Bins SQL"""
2 |
3 | EQUAL_INTERVAL_BINS_SQL = """
4 | -- https://github.com/CartoDB/cartodb-postgresql/tree/master/scripts-available
5 |
6 | CREATE OR REPLACE FUNCTION equal_interval_bins ( in_array anyarray, breaks INT ) RETURNS anyarray as $$
7 | WITH stats AS (
8 | SELECT min(e), (max(e)-min(e))/breaks AS del
9 | FROM (SELECT unnest(in_array) e) AS p)
10 | SELECT array_agg(bins)
11 | FROM (
12 | SELECT min + generate_series(1,breaks)*del AS bins
13 | FROM stats) q;
14 | $$ language SQL IMMUTABLE STRICT PARALLEL SAFE;
15 | """
16 |
17 | HEAD_TAIL_BINS_SQL= """
18 | CREATE OR REPLACE FUNCTION head_tail_bins ( in_array anyarray, breaks INT) RETURNS anyarray as $$
19 | DECLARE
20 | element_count INT4;
21 | arr_mean numeric;
22 | i INT := 2;
23 | reply numeric[];
24 | BEGIN
25 | -- get the total size of our row
26 | element_count := array_upper(in_array, 1) - array_lower(in_array, 1);
27 | -- ensure the ordering of in_array
28 | SELECT array_agg(e) INTO in_array FROM (SELECT unnest(in_array) e ORDER BY e) x;
29 | -- stop if no rows
30 | IF element_count IS NULL THEN
31 | RETURN NULL;
32 | END IF;
33 | -- stop if our breaks are more than our input array size
34 | IF element_count < breaks THEN
35 | RETURN in_array;
36 | END IF;
37 |
38 | -- get our mean value
39 | SELECT avg(v) INTO arr_mean FROM ( SELECT unnest(in_array) as v ) x;
40 |
41 | reply = Array[arr_mean];
42 | -- slice our bread
43 | LOOP
44 | IF i > breaks THEN EXIT; END IF;
45 | SELECT avg(e) INTO arr_mean FROM ( SELECT unnest(in_array) e) x WHERE e > reply[i-1];
46 | IF arr_mean IS NOT NULL THEN
47 | reply = array_append(reply, arr_mean);
48 | END IF;
49 | i := i+1;
50 | END LOOP;
51 | RETURN reply;
52 | END;
53 | $$ language plpgsql IMMUTABLE PARALLEL SAFE;
54 | """
55 |
56 | QUANTILE_BINS_SQL = """
57 | CREATE OR REPLACE FUNCTION quantile_bins(in_array numeric[], breaks int)
58 | RETURNS numeric[]
59 | AS $$
60 | SELECT
61 | percentile_disc(Array(SELECT generate_series(1, breaks) / breaks::numeric))
62 | WITHIN GROUP (ORDER BY x ASC) AS p
63 | FROM
64 | unnest(in_array) AS x;
65 | $$ language SQL IMMUTABLE STRICT PARALLEL SAFE;
66 | """
67 |
68 | JENKS_BIN_SQL_1 = """
69 | CREATE OR REPLACE FUNCTION jenk_bins(in_array anyarray, breaks INT, iterations INT DEFAULT 0, invert BOOLEAN DEFAULT FALSE)
70 | RETURNS NUMERIC[] as
71 | $$
72 | DECLARE
73 | in_matrix NUMERIC[][];
74 | in_unique_count BIGINT;
75 |
76 | shuffles INT;
77 | arr_mean NUMERIC;
78 | sdam NUMERIC;
79 |
80 | i INT;
81 | bot INT;
82 | top INT;
83 |
84 | tops INT[];
85 | classes INT[][];
86 | j INT := 1;
87 | curr_result NUMERIC[];
88 | best_result NUMERIC[];
89 | seedtarget TEXT;
90 |
91 | BEGIN
92 | -- We clean the input array (remove NULLs) and create 2 arrays
93 | -- [1] contains the unique values in in_array
94 | -- [2] contains the number of appearances of those unique values
95 | SELECT ARRAY[array_agg(value), array_agg(count)] FROM
96 | (
97 | SELECT value, count(1)::numeric as count
98 | FROM unnest(in_array) AS value
99 | WHERE value is NOT NULL
100 | GROUP BY value
101 | ORDER BY value
102 | ) __clean_array_q INTO in_matrix;
103 |
104 | -- Get the number of unique values
105 | in_unique_count := array_length(in_matrix[1:1], 2);
106 |
107 | IF in_unique_count IS NULL THEN
108 | RETURN NULL;
109 | END IF;
110 |
111 | IF in_unique_count <= breaks THEN
112 | -- There isn't enough distinct values for the requested breaks
113 | RETURN ARRAY(Select unnest(in_matrix[1:1])) _a;
114 | END IF;
115 |
116 | -- If not declated explicitly we iterate based on the length of the array
117 | IF iterations < 1 THEN
118 | -- This is based on a 'looks fine' heuristic
119 | iterations := log(in_unique_count)::integer + 1;
120 | END IF;
121 |
122 | -- We set the number of shuffles per iteration as the number of unique values but
123 | -- this is just another 'looks fine' heuristic
124 | shuffles := in_unique_count;
125 |
126 | -- Get the mean value of the whole vector (already ignores NULLs)
127 | SELECT avg(v) INTO arr_mean FROM ( SELECT unnest(in_array) as v ) x;
128 |
129 | -- Calculate the sum of squared deviations from the array mean (SDAM).
130 | SELECT sum(((arr_mean - v)^2) * w) INTO sdam FROM (
131 | SELECT unnest(in_matrix[1:1]) as v, unnest(in_matrix[2:2]) as w
132 | ) x;
133 |
134 | -- To start, we create ranges with approximately the same amount of different values
135 | top := 0;
136 | i := 1;
137 | LOOP
138 | bot := top + 1;
139 | top := ROUND(i * in_unique_count::numeric / breaks::NUMERIC);
140 |
141 | IF i = 1 THEN
142 | classes = ARRAY[ARRAY[bot,top]];
143 | ELSE
144 | classes = ARRAY_CAT(classes, ARRAY[bot,top]);
145 | END IF;
146 |
147 | i := i + 1;
148 | IF i > breaks THEN EXIT; END IF;
149 | END LOOP;
150 |
151 | best_result = jenk_bins_iteration(in_matrix, breaks, classes, invert, sdam, shuffles);
152 |
153 | --set the seed so we can ensure the same results
154 | SELECT setseed(0.4567) INTO seedtarget;
155 | --loop through random starting positions
156 | LOOP
157 | IF j > iterations-1 THEN EXIT; END IF;
158 | i = 1;
159 | tops = ARRAY[in_unique_count];
160 | LOOP
161 | IF i = breaks THEN EXIT; END IF;
162 | SELECT array_agg(distinct e) INTO tops FROM (
163 | SELECT unnest(array_cat(tops, ARRAY[trunc(random() * in_unique_count::float8)::int + 1])) as e ORDER BY e
164 | ) x;
165 | i = array_length(tops, 1);
166 | END LOOP;
167 | top := 0;
168 | i = 1;
169 | LOOP
170 | bot := top + 1;
171 | top = tops[i];
172 | IF i = 1 THEN
173 | classes = ARRAY[ARRAY[bot,top]];
174 | ELSE
175 | classes = ARRAY_CAT(classes, ARRAY[bot,top]);
176 | END IF;
177 |
178 | i := i+1;
179 | IF i > breaks THEN EXIT; END IF;
180 | END LOOP;
181 |
182 | curr_result = jenk_bins_iteration(in_matrix, breaks, classes, invert, sdam, shuffles);
183 |
184 | IF curr_result[1] > best_result[1] THEN
185 | best_result = curr_result;
186 | END IF;
187 |
188 | j = j+1;
189 | END LOOP;
190 |
191 | RETURN (best_result)[2:array_upper(best_result, 1)];
192 | END;
193 | $$ LANGUAGE PLPGSQL IMMUTABLE PARALLEL RESTRICTED;
194 | """
195 |
196 | JENKS_BIN_SQL_2 = """
197 |
198 | CREATE OR REPLACE FUNCTION jenk_bins_iteration ( in_matrix NUMERIC[], breaks INT, classes INT[], invert BOOLEAN, sdam NUMERIC, max_search INT DEFAULT 50) RETURNS NUMERIC[] as $$
199 | DECLARE
200 | i INT;
201 | iterations INT = 0;
202 |
203 | side INT := 2;
204 |
205 | gvf numeric := 0.0;
206 | new_gvf numeric;
207 | arr_gvf numeric[];
208 | arr_avg numeric[];
209 | class_avg numeric;
210 | class_dev numeric;
211 |
212 | class_max_i INT = 0;
213 | class_min_i INT = 0;
214 | dev_max numeric;
215 | dev_min numeric;
216 |
217 | best_classes INT[] = classes;
218 | best_gvf numeric[];
219 | best_avg numeric[];
220 | move_elements INT = 1;
221 |
222 | reply numeric[];
223 |
224 | BEGIN
225 |
226 | -- We fill the arrays with the initial values
227 | i = 0;
228 | LOOP
229 | IF i = breaks THEN EXIT; END IF;
230 | i = i + 1;
231 |
232 | -- Get class mean
233 | SELECT (sum(v * w) / sum(w)) INTO class_avg FROM (
234 | SELECT unnest(in_matrix[1:1][classes[i][1]:classes[i][2]]) as v,
235 | unnest(in_matrix[2:2][classes[i][1]:classes[i][2]]) as w
236 | ) x;
237 |
238 | -- Get class deviation
239 | SELECT sum((class_avg - v)^2 * w) INTO class_dev FROM (
240 | SELECT unnest(in_matrix[1:1][classes[i][1]:classes[i][2]]) as v,
241 | unnest(in_matrix[2:2][classes[i][1]:classes[i][2]]) as w
242 | ) x;
243 |
244 |
245 | IF i = 1 THEN
246 | arr_avg = ARRAY[class_avg];
247 | arr_gvf = ARRAY[class_dev];
248 | ELSE
249 | arr_avg = array_append(arr_avg, class_avg);
250 | arr_gvf = array_append(arr_gvf, class_dev);
251 | END IF;
252 | END LOOP;
253 |
254 | -- We copy the values to avoid recalculation when a failure happens
255 | best_avg = arr_avg;
256 | best_gvf = arr_gvf;
257 |
258 | iterations = 0;
259 | LOOP
260 | IF iterations = max_search THEN EXIT; END IF;
261 | iterations = iterations + 1;
262 |
263 | -- calculate our new GVF
264 | SELECT sdam - sum(e) INTO new_gvf FROM ( SELECT unnest(arr_gvf) as e ) x;
265 |
266 | -- Check if any improvement was made
267 | IF new_gvf <= gvf THEN
268 | -- If we were moving too many elements, go back and move less
269 | IF move_elements <= 2 OR class_max_i = class_min_i THEN
270 | EXIT;
271 | END IF;
272 |
273 | move_elements = GREATEST(move_elements / 8, 1);
274 |
275 | -- Rollback from saved statuses
276 | classes = best_classes;
277 | new_gvf = gvf;
278 |
279 | i = class_min_i;
280 | LOOP
281 | arr_avg[i] = best_avg[i];
282 | arr_gvf[i] = best_gvf[i];
283 |
284 | IF i = class_max_i THEN EXIT; END IF;
285 | i = i + 1;
286 | END LOOP;
287 | END IF;
288 |
289 | -- We search for the classes with the min and max deviation
290 | i = 1;
291 | class_min_i = 1;
292 | class_max_i = 1;
293 | dev_max = arr_gvf[1];
294 | dev_min = arr_gvf[1];
295 | LOOP
296 | IF i = breaks THEN EXIT; END IF;
297 | i = i + 1;
298 |
299 | IF arr_gvf[i] < dev_min THEN
300 | dev_min = arr_gvf[i];
301 | class_min_i = i;
302 | ELSE
303 | IF arr_gvf[i] > dev_max THEN
304 | dev_max = arr_gvf[i];
305 | class_max_i = i;
306 | END IF;
307 | END IF;
308 | END LOOP;
309 |
310 |
311 | -- Save best values for comparison and output
312 | gvf = new_gvf;
313 | best_classes = classes;
314 |
315 | -- Limit the moved elements as to not remove everything from class_max_i
316 | move_elements = LEAST(move_elements, classes[class_max_i][2] - classes[class_max_i][1]);
317 |
318 | -- Move `move_elements` from class_max_i to class_min_i
319 | IF class_min_i < class_max_i THEN
320 | i := class_min_i;
321 | LOOP
322 | IF i = class_max_i THEN EXIT; END IF;
323 | classes[i][2] = classes[i][2] + move_elements;
324 | i := i + 1;
325 | END LOOP;
326 |
327 | i := class_max_i;
328 | LOOP
329 | IF i = class_min_i THEN EXIT; END IF;
330 | classes[i][1] = classes[i][1] + move_elements;
331 | i := i - 1;
332 | END LOOP;
333 | ELSE
334 | i := class_min_i;
335 | LOOP
336 | IF i = class_max_i THEN EXIT; END IF;
337 | classes[i][1] = classes[i][1] - move_elements;
338 | i := i - 1;
339 | END LOOP;
340 |
341 | i := class_max_i;
342 | LOOP
343 | IF i = class_min_i THEN EXIT; END IF;
344 | classes[i][2] = classes[i][2] - move_elements;
345 | i := i + 1;
346 | END LOOP;
347 | END IF;
348 |
349 | -- Recalculate avg and deviation ONLY for the affected classes
350 | i = LEAST(class_min_i, class_max_i);
351 | class_max_i = GREATEST(class_min_i, class_max_i);
352 | class_min_i = i;
353 | LOOP
354 | SELECT (sum(v * w) / sum(w)) INTO class_avg FROM (
355 | SELECT unnest(in_matrix[1:1][classes[i][1]:classes[i][2]]) as v,
356 | unnest(in_matrix[2:2][classes[i][1]:classes[i][2]]) as w
357 | ) x;
358 |
359 | SELECT sum((class_avg - v)^2 * w) INTO class_dev FROM (
360 | SELECT unnest(in_matrix[1:1][classes[i][1]:classes[i][2]]) as v,
361 | unnest(in_matrix[2:2][classes[i][1]:classes[i][2]]) as w
362 | ) x;
363 |
364 | -- Save status (in case it's needed for rollback) and store the new one
365 | best_avg[i] = arr_avg[i];
366 | arr_avg[i] = class_avg;
367 |
368 | best_gvf[i] = arr_gvf[i];
369 | arr_gvf[i] = class_dev;
370 |
371 | IF i = class_max_i THEN EXIT; END IF;
372 | i = i + 1;
373 | END LOOP;
374 |
375 | move_elements = move_elements * 2;
376 |
377 | END LOOP;
378 |
379 | i = 1;
380 | LOOP
381 | IF invert = TRUE THEN
382 | side = 1; --default returns bottom side of breaks, invert returns top side
383 | END IF;
384 | reply = array_append(reply, unnest(in_matrix[1:1][best_classes[i][side]:best_classes[i][side]]));
385 | i = i+1;
386 | IF i > breaks THEN EXIT; END IF;
387 | END LOOP;
388 |
389 | reply = array_prepend(gvf, reply);
390 | RETURN reply;
391 |
392 | END;
393 | $$ LANGUAGE PLPGSQL IMMUTABLE PARALLEL SAFE;
394 | """
--------------------------------------------------------------------------------
/qwikgeo_api/config.py:
--------------------------------------------------------------------------------
import os
from dotenv import load_dotenv

# Populate os.environ from a local .env file (see .env.sample) before
# reading any settings.
load_dotenv()

# Database connection settings (consumed by db.py and main.py).
DB_HOST = os.getenv('DB_HOST')
DB_DATABASE = os.getenv('DB_DATABASE')
DB_USERNAME = os.getenv('DB_USERNAME')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_PORT = os.getenv('DB_PORT')
# NOTE(review): int(None) raises TypeError when these variables are unset,
# so the app fails at import time — confirm fail-fast is intended.
CACHE_AGE_IN_SECONDS = int(os.getenv('CACHE_AGE_IN_SECONDS'))
MAX_FEATURES_PER_TILE = int(os.getenv('MAX_FEATURES_PER_TILE'))
# Secret used to sign/verify HS256 JWTs (see authentication_handler.py).
SECRET_KEY = os.getenv('SECRET_KEY')
GOOGLE_CLIENT_ID = os.getenv('GOOGLE_CLIENT_ID')
# NOTE(review): kept as a string (not int()) unlike the two settings above —
# verify downstream consumers accept a string here.
JWT_TOKEN_EXPIRE_IN_MINUTES = os.getenv('JWT_TOKEN_EXPIRE_IN_MINUTES')

# Postgres column types this API treats as numeric.
NUMERIC_FIELDS = ['bigint','bigserial','double precision','integer','smallint','real','smallserial','serial','numeric','money']
18 |
--------------------------------------------------------------------------------
/qwikgeo_api/db.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Database Setup"""
2 |
3 | from fastapi import FastAPI
4 | import asyncpg
5 |
6 | from qwikgeo_api import config
7 | from qwikgeo_api import bins_sql
8 |
async def connect_to_db(app: FastAPI) -> None:
    """
    Create the application's asyncpg connection pool on ``app.state.database``
    and install the bin-classification SQL functions used by the API.

    Args:
        app: The FastAPI application whose state holds the pool.
    """

    # Fixed: removed a dead ``app.state.database = {}`` assignment that was
    # immediately overwritten by the pool below.
    app.state.database = await asyncpg.create_pool(
        dsn=f"postgres://{config.DB_USERNAME}:{config.DB_PASSWORD}@{config.DB_HOST}:{config.DB_PORT}/{config.DB_DATABASE}",
        min_size=1,
        max_size=10,
        max_queries=50000,
        max_inactive_connection_lifetime=300,
        timeout=180  # 3 Minutes
    )

    # Install the binning helper functions. These are DDL statements with
    # no result set, so execute() is the appropriate call (the previous
    # fetchrow() ran them too, but asked for a row that never exists).
    setup_statements = (
        bins_sql.EQUAL_INTERVAL_BINS_SQL,
        bins_sql.HEAD_TAIL_BINS_SQL,
        bins_sql.QUANTILE_BINS_SQL,
        bins_sql.JENKS_BIN_SQL_1,
        bins_sql.JENKS_BIN_SQL_2,
    )
    async with app.state.database.acquire() as con:
        for statement in setup_statements:
            await con.execute(statement)
31 |
async def close_db_connection(app: FastAPI) -> None:
    """
    Close connection for database.

    Gracefully closes the asyncpg pool created by connect_to_db(),
    waiting for acquired connections to be released.
    """

    await app.state.database.close()
38 |
--------------------------------------------------------------------------------
/qwikgeo_api/db_models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Database Models"""
2 |
3 | from passlib.hash import bcrypt
4 | from tortoise import fields, models
5 | from tortoise.contrib.pydantic import pydantic_model_creator
6 | from tortoise import Tortoise
7 |
class User(models.Model):
    """Model for user in database"""

    id = fields.IntField(pk=True)
    # Username is unique and is the target of FK references from Item,
    # Table and Map (to_field="username").
    username = fields.CharField(500, unique=True)
    # Nullable — presumably accounts created via external auth (e.g.
    # Google) carry no local hash; confirm against the auth router.
    password_hash = fields.CharField(max_length=300, null=True)
    first_name = fields.CharField(max_length=300)
    last_name = fields.CharField(max_length=300)
    photo_url = fields.CharField(max_length=1000, null=True)
    email = fields.CharField(max_length=500)
    created_time = fields.DatetimeField(auto_now_add=True)
    modified_time = fields.DatetimeField(auto_now=True)

    def verify_password(self, password: str) -> bool:
        """Method used to verify password is correct against hash in database."""

        return bcrypt.verify(password, self.password_hash)
25 |
class Group(models.Model):
    """Model for group in database"""

    group_id = fields.UUIDField(unique=True, indexable=True, pk=True)
    name = fields.CharField(500, unique=True)
    # Reverse relations populated via the FKs on GroupUser / GroupAdmin.
    users = fields.ReverseRelation["GroupUser"]
    admins = fields.ReverseRelation["GroupAdmin"]
33 |
class GroupUser(models.Model):
    """Model for group_user in database"""

    id = fields.IntField(pk=True)
    # Owning group; deleting the group removes its membership rows.
    group_id: fields.ForeignKeyNullableRelation[Group] = fields.ForeignKeyField(
        model_name="models.Group",
        related_name="group_users",
        to_field="group_id",
        on_delete='CASCADE'
    )
    # Plain string rather than an FK to User — NOTE(review): confirm
    # whether dangling usernames are acceptable here.
    username = fields.CharField(500)
45 |
class GroupAdmin(models.Model):
    """Model for group_admin in database"""
    # (Docstring fixed: previously said "group_user", copy-pasted from
    # the GroupUser model above.)

    id = fields.IntField(pk=True)
    # Owning group; deleting the group removes its admin rows.
    group_id: fields.ForeignKeyNullableRelation[Group] = fields.ForeignKeyField(
        model_name="models.Group",
        related_name="group_admins",
        to_field="group_id",
        on_delete='CASCADE'
    )
    username = fields.CharField(500)
57 |
58 |
class Item(models.Model):
    """Model for item in database"""

    # Owning user, keyed by username; deleting the user cascades to items.
    user: fields.ForeignKeyNullableRelation[User] = fields.ForeignKeyField(
        model_name="models.User",
        related_name="items",
        to_field="username",
        on_delete='CASCADE'
    )
    portal_id = fields.UUIDField(unique=True, indexable=True, pk=True)
    title = fields.CharField(max_length=500, indexable=True)
    created_time = fields.DatetimeField(auto_now_add=True)
    modified_time = fields.DatetimeField(auto_now=True)
    tags = fields.JSONField()
    description = fields.TextField()
    # Reverse relations to the per-item read/write access-list rows.
    read_access_list = fields.ReverseRelation["ItemReadAccessList"]
    write_access_list = fields.ReverseRelation["ItemWriteAccessList"]
    views = fields.IntField()
    searchable = fields.BooleanField(default=True)
    item_type = fields.TextField()
    url = fields.TextField(null=True)
80 |
class ItemReadAccessList(models.Model):
    """Model for item_read_access_list in database"""

    # Item this ACL entry belongs to; removed with the item.
    portal_id: fields.ForeignKeyRelation[Item] = fields.ForeignKeyField(
        model_name="models.Item",
        related_name="item_read_access_list",
        to_field="portal_id",
        on_delete='CASCADE'
    )
    # Principal granted read access — presumably a username or group
    # name; confirm against the items router.
    name = fields.CharField(500)
91 |
class ItemWriteAccessList(models.Model):
    """Model for item_write_access_list in database"""
    # (Docstring fixed: previously said "item_read_access_list",
    # copy-pasted from the model above.)

    # Item this ACL entry belongs to; removed with the item.
    portal_id: fields.ForeignKeyRelation[Item] = fields.ForeignKeyField(
        model_name="models.Item",
        related_name="item_write_access_list",
        to_field="portal_id",
        on_delete='CASCADE'
    )
    # Principal granted write access — presumably a username or group
    # name; confirm against the items router.
    name = fields.CharField(500)
102 |
class Table(models.Model):
    """Model for table in database"""

    # Owning user, keyed by username.
    user: fields.ForeignKeyNullableRelation[User] = fields.ForeignKeyField(
        model_name="models.User",
        related_name="tables",
        to_field="username",
        on_delete='CASCADE'
    )
    # Portal item this table is registered under.
    item: fields.ForeignKeyNullableRelation[Item] = fields.ForeignKeyField(
        model_name="models.Item",
        related_name="tables",
        on_delete='CASCADE'
    )
    table_id = fields.CharField(50, pk=True)
    created_time = fields.DatetimeField(auto_now_add=True)
    modified_time = fields.DatetimeField(auto_now=True)
120 |
class Map(models.Model):
    """Model for map in database"""

    # Owning user, keyed by username.
    user: fields.ForeignKeyNullableRelation[User] = fields.ForeignKeyField(
        model_name="models.User",
        related_name="maps",
        to_field="username",
        on_delete='CASCADE'
    )
    # Portal item this map is registered under.
    item: fields.ForeignKeyNullableRelation[Item] = fields.ForeignKeyField(
        model_name="models.Item",
        related_name="maps",
        on_delete='CASCADE'
    )
    map_id = fields.UUIDField(unique=True, indexable=True, pk=True)
    created_time = fields.DatetimeField(auto_now_add=True)
    modified_time = fields.DatetimeField(auto_now=True)
    # Camera defaults for the map view.
    pitch = fields.IntField(default=0)
    bearing = fields.IntField(default=0)
    basemap = fields.CharField(max_length=50)
    bounding_box = fields.JSONField()
    # Reverse relation populated via Layer.map.
    layers: fields.ReverseRelation["Layer"]
143 |
class Layer(models.Model):
    """Model for layer in database"""

    layer_id = fields.CharField(max_length=1000)
    title = fields.CharField(max_length=500)
    description = fields.CharField(max_length=500)
    map_type = fields.CharField(max_length=50)
    mapbox_name = fields.CharField(max_length=50)
    geometry_type = fields.CharField(max_length=50)
    # Fixed: use the dict constructor as the default instead of a shared
    # ``{}`` literal — a single mutable dict object would otherwise be
    # shared by every Layer instance that relies on the default.
    style = fields.JSONField(default=dict, null=True)
    paint = fields.JSONField(default=dict, null=True)
    layout = fields.JSONField(default=dict, null=True)
    fill_paint = fields.JSONField(default=dict, null=True)
    border_paint = fields.JSONField(default=dict, null=True)
    bounding_box = fields.JSONField()

    # Owning map; deleting a map cascades to its layers.
    map: fields.ForeignKeyRelation[Map] = fields.ForeignKeyField(
        "models.Map", related_name="layers", to_field="map_id",
        on_delete='CASCADE'
    )
164 |
165 |
166 | Tortoise.init_models(["qwikgeo_api.db_models"], "models")
167 |
168 | Group_Pydantic = pydantic_model_creator(Group, name="Group")
169 | Table_Pydantic = pydantic_model_creator(Table, name="Table")
170 | TableOut_Pydantic = pydantic_model_creator(Table, name="TableOut", exclude=("user","item.user","item.maps",))
171 | Map_Pydantic = pydantic_model_creator(Map, name="Map")
172 | MapOut_Pydantic = pydantic_model_creator(Map, name="MapOut", exclude=("user","item.user","item.tables",))
173 | Item_Pydantic = pydantic_model_creator(Item, name="Item")
174 | ItemOut_Pydantic = pydantic_model_creator(Item, name="ItemOut", exclude=("user","tables.user"))
175 | ItemReadAccessListPydantic = pydantic_model_creator(ItemReadAccessList, name="ItemReadAccessList")
176 |
--------------------------------------------------------------------------------
/qwikgeo_api/main.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API"""
2 |
3 | from fastapi import FastAPI, Request
4 | from fastapi.middleware.cors import CORSMiddleware
5 | from tortoise.contrib.fastapi import register_tortoise
6 | from prometheus_fastapi_instrumentator import Instrumentator
7 |
8 | from qwikgeo_api import db
9 | from qwikgeo_api import config
10 | from qwikgeo_api.routers.authentication import router as authentication_router
11 | from qwikgeo_api.routers.items.groups import router as groups_router
12 | from qwikgeo_api.routers.items.users import router as users_router
13 | from qwikgeo_api.routers.items.tables import router as tables_router
14 | from qwikgeo_api.routers.imports import router as imports_router
15 | from qwikgeo_api.routers.analysis import router as analysis_router
16 | from qwikgeo_api.routers.collections import router as collections_router
17 | from qwikgeo_api.routers.items.maps import router as maps_router
18 | from qwikgeo_api.routers.items import router as items_router
19 |
20 | DESCRIPTION = """A python api to create a geoportal."""
21 |
22 | DB_CONFIG = {
23 | "connections": {
24 | "default": f"postgres://{config.DB_USERNAME}:{config.DB_PASSWORD}@{config.DB_HOST}:{config.DB_PORT}/{config.DB_DATABASE}"
25 | },
26 | "apps": {
27 | "models": {
28 | "models": ["qwikgeo_api.db_models", "aerich.models"],
29 | "default_connection": "default",
30 | },
31 | }
32 | }
33 |
# FastAPI application instance; this metadata appears in /docs and
# /openapi.json.
app = FastAPI(
    title="QwikGeo API",
    description=DESCRIPTION,
    version="0.0.1",
    contact={
        "name": "Michael Keller",
        "email": "michaelkeller03@gmail.com",
    },
    license_info={
        "name": "The MIT License (MIT)",
        "url": "https://mit-license.org/",
    },
)

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True
# is a maximally permissive CORS policy — confirm this is intended for
# production deployments.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount the feature routers under their /api/v1 prefixes.
app.include_router(
    authentication_router.router,
    prefix="/api/v1/authentication",
    tags=["Authentication"],
)

app.include_router(
    items_router.router,
    prefix="/api/v1/items",
    tags=["Items"],
)

app.include_router(
    groups_router.router,
    prefix="/api/v1/items/groups",
    tags=["Groups"],
)

app.include_router(
    users_router.router,
    prefix="/api/v1/items/users",
    tags=["Users"],
)

app.include_router(
    tables_router.router,
    prefix="/api/v1/items/tables",
    tags=["Tables"],
)

app.include_router(
    imports_router.router,
    prefix="/api/v1/imports",
    tags=["Imports"],
)

app.include_router(
    analysis_router.router,
    prefix="/api/v1/analysis",
    tags=["Analysis"],
)

app.include_router(
    collections_router.router,
    prefix="/api/v1/collections",
    tags=["Collections"],
)

app.include_router(
    maps_router.router,
    prefix="/api/v1/items/maps",
    tags=["Maps"],
)
109 |
110 | @app.on_event("startup")
111 | async def startup_event():
112 | """Application startup: register the database connection and create table list."""
113 | await db.connect_to_db(app)
114 |
115 | @app.on_event("shutdown")
116 | async def shutdown_event():
117 | """Application shutdown: de-register the database connection."""
118 |
119 | await db.close_db_connection(app)
120 |
@app.get(
    path="/api/v1/",
    tags=["Landing Page"],
    responses={
        200: {
            "description": "Successful Response",
            "content": {
                "application/json": {
                    "example": {
                        "links": [
                            {
                                "rel": "self",
                                "type": "application/json",
                                "title": "This document as JSON",
                                "href": "https://api.qwikgeo.com/api/v1/"
                            },
                            {
                                "rel": "conformance",
                                "type": "application/json",
                                "title": "Conformance",
                                "href": "https://api.qwikgeo.com/api/v1/conformance"
                            },
                            {
                                "rel": "data",
                                "type": "application/json",
                                "title": "Collections",
                                "href": "https://api.qwikgeo.com/api/v1/collections"
                            },
                            {
                                "rel": "service-desc",
                                "type": "application/vnd.oai.openapi+json;version=3.0",
                                "title": "The OpenAPI definition as JSON",
                                "href": "https://api.qwikgeo.com/openapi.json"
                            }
                        ],
                        "title": "QwikGeo API",
                        "description": DESCRIPTION
                    }
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    # Fixed: this was a bare set literal
                    # ({"Internal Server Error"}), which is not a valid
                    # OpenAPI media-type object and is not JSON serializable.
                    "example": {"detail": "Internal Server Error"}
                }
            }
        }
    }
)
async def landing_page(
    request: Request
):
    """Get landing page.

    Returns the OGC API landing page with links built from the
    request's base URL.
    """

    url = str(request.base_url)

    return {
        "links": [
            {
                "rel": "self",
                "type": "application/json",
                "title": "This document as JSON",
                "href": f"{url}api/v1/"
            },
            {
                "rel": "conformance",
                "type": "application/json",
                "title": "Conformance",
                "href": f"{url}api/v1/conformance"
            },
            {
                "rel": "data",
                "type": "application/json",
                "title": "Collections",
                "href": f"{url}api/v1/collections"
            },
            {
                "rel": "service-desc",
                "type": "application/vnd.oai.openapi+json;version=3.0",
                "title": "The OpenAPI definition as JSON",
                "href": f"{url}openapi.json"
            }
        ],
        "title": "QwikGeo API",
        "description": DESCRIPTION
    }
209 |
@app.get(
    path="/api/v1/conformance",
    tags=["Conformance"],
    responses={
        200: {
            "description": "Successful Response",
            "content": {
                "application/json": {
                    "example": {
                        "conformsTo": [
                            "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core",
                            "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson",
                            "http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter",
                            "http://www.opengis.net/spec/ogcapi-features-4/1.0/req/features",
                            "http://www.opengis.net/spec/ogcapi-tiles-1/1.0/conf/core"
                        ]
                    }
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    # Fixed: this was a bare set literal
                    # ({"Internal Server Error"}), which is not a valid
                    # OpenAPI media-type object and is not JSON serializable.
                    "example": {"detail": "Internal Server Error"}
                }
            }
        }
    }
)
async def conformance():
    """Get conformance of api.

    Returns the list of OGC API conformance classes this service
    implements.
    """

    return {
        "conformsTo": [
            "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core",
            "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson",
            "http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter",
            "http://www.opengis.net/spec/ogcapi-features-4/1.0/req/features",
            "http://www.opengis.net/spec/ogcapi-tiles-1/1.0/conf/core"
        ]
    }
252 |
@app.get(
    path="/api/v1/health_check",
    tags=["Health"],
    responses={
        200: {
            "description": "Successful Response",
            "content": {
                "application/json": {
                    "example": {"status": "UP"}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def health():
    """Liveness probe: reports that the server is able to serve requests."""

    health_status = {"status": "UP"}
    return health_status
279 |
# Bind Tortoise ORM to the FastAPI app using the project's DB_CONFIG.
# generate_schemas=True creates any missing tables at startup, and
# add_exception_handlers=True converts ORM errors into HTTP error responses.
register_tortoise(
    app,
    config=DB_CONFIG,
    generate_schemas=True,
    add_exception_handlers=True
)


# Attach Prometheus instrumentation and expose the metrics endpoint
# (/metrics by default for prometheus-fastapi-instrumentator).
Instrumentator().instrument(app).expose(app)
289 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/analysis/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Analysis - Models"""
2 |
3 | from pydantic import BaseModel, Field
4 |
class BaseAnalysisModel(BaseModel):
    """Request body shared by analyses that only need a source table."""

    # NOTE(review): default=None makes this field optional even though an
    # analysis cannot run without it — confirm whether it should be required.
    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )

class BaseResponseModel(BaseModel):
    """Response returned when an analysis job is accepted.

    The defaults below serve as example values in the OpenAPI docs.
    """

    # Identifier of the queued analysis process.
    process_id: str = Field(
        default="472e29dc-91a8-41d3-b05f-cee34006e3f7"
    )
    # Polling URL for the status of the queued process.
    url: str = Field(
        default="https://api.qwikgeo.com/api/v1/analysis/status/472e29dc-91a8-41d3-b05f-cee34006e3f7"
    )

class BadResponseModel(BaseModel):
    """Response describing a failed analysis (defaults are doc examples)."""

    status: str = Field(
        default="FAILURE"
    )
    # ISO-8601 timestamp of when the job finished.
    completion_time: str = Field(
        default="2022-07-06T19:33:17.950059"
    )
    run_time_in_seconds: float = Field(
        default=1.78599
    )

class BufferModel(BaseModel):
    """Request body for the buffer analysis."""

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    distance_in_kilometers: float = Field(
        default=None, title="Size of buffer in kilometers."
    )

class DissolveByValueModel(BaseModel):
    """Request body for the dissolve-by-value analysis."""

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    column: str = Field(
        default=None, title="Column used to dissolve geometry."
    )

class GridModel(BaseModel):
    """Request body for grid analyses."""

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    grid_size_in_kilometers: float = Field(
        default=None, title="Size of grids in kilometers."
    )

class KMeansModel(BaseModel):
    """Request body for the k-means cluster analysis."""

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    number_of_clusters: int = Field(
        default=None, title="Number of clusters to group points together."
    )
74 |
class FindWithinDistanceModel(BaseModel):
    """Request body for the find-within-distance analysis.

    Searches ``table_id`` for features within ``distance_in_kilometers``
    of the starting point (``latitude``, ``longitude``).
    """

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    latitude: float = Field(
        default=None, title="Starting Latitude."
    )
    # Fixed copy/paste error: this field previously advertised
    # "Starting Latitude." in the generated OpenAPI docs.
    longitude: float = Field(
        default=None, title="Starting Longitude."
    )
    distance_in_kilometers: float = Field(
        default=None, title="Size to search in kilometers."
    )
90 |
class PolygonsModel(BaseModel):
    """Request body for analyses that run against a polygon table."""

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    # Table id of the polygon layer used by the analysis.
    polygons: str = Field(
        default=None, title="Name of the table of polygons."
    )

class AggregatePointsByGridsModel(BaseModel):
    """Request body for the aggregate-points-by-grids analysis."""

    table_id: str = Field(
        default=None, title="Name of the table to perform analysis on."
    )
    distance_in_kilometers: float = Field(
        default=None, title="Size to search in kilometers."
    )
    # NOTE(review): allowed values are not constrained here — presumably a
    # square/hexagon choice; consider a Literal type after confirming.
    grid_type: str = Field(
        default=None, title="Type of grid to use."
    )
113 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/authentication/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Authentication - Models"""
2 |
3 | from tortoise.contrib.pydantic import pydantic_model_creator
4 | from pydantic import BaseModel
5 |
6 | from qwikgeo_api import db_models
7 |
class Login(BaseModel):
    """Credentials for username/password authentication."""

    username: str
    password: str

class GoogleTokenAuthenticate(BaseModel):
    """Model for logging in with Google JWT"""

    # Google-issued ID token (JWT), verified server-side.
    token: str

class Status(BaseModel):
    """Model for returning a request with a message"""

    message: str

class TokenResponse(BaseModel):
    """Model for returning a JWT token"""

    access_token: str
    # Token scheme; this API always issues "Bearer" tokens.
    token_type: str="Bearer"

# Output serializer for User rows; password_hash is excluded so it never
# leaves the API.
User_Pydantic = pydantic_model_creator(db_models.User, name="User", exclude=("password_hash", ))
# Input model: read-only (database-generated) fields are excluded.
UserIn_Pydantic = pydantic_model_creator(db_models.User, name="UserIn", exclude_readonly=True)
32 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/authentication/router.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Authentication"""
2 |
3 | from datetime import datetime, timedelta
4 | from fastapi import APIRouter, HTTPException
5 | from tortoise import exceptions
6 | import jwt
7 | from google.oauth2 import id_token
8 | from google.auth.transport import requests
9 |
10 | from qwikgeo_api import db_models
11 | import qwikgeo_api.routers.authentication.models as models
12 | from qwikgeo_api import utilities
13 | from qwikgeo_api import config
14 |
15 | router = APIRouter()
16 |
@router.post(
    path='/token',
    response_model=models.TokenResponse,
    responses={
        401: {
            "description": "Unauthorized",
            "content": {
                "application/json": {
                    "example": {"detail": "Invalid username or password."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def create_token(
    form_data: models.Login
):
    """
    Create a JWT token to authenticate with api via a valid username and password.
    More information at https://docs.qwikgeo.com/authentication/#token
    """

    # Verify the credentials via the shared utilities helper and serialize
    # the resulting ORM row.
    user = await utilities.authenticate_user(form_data.username, form_data.password)
    user_obj = await models.User_Pydantic.from_tortoise_orm(user)

    # Sign a JWT whose lifetime comes from configuration.
    expiration = datetime.utcnow() + timedelta(
        minutes=int(config.JWT_TOKEN_EXPIRE_IN_MINUTES)
    )
    claims = {"username": user_obj.username, "exp": expiration}
    token = jwt.encode(claims, config.SECRET_KEY)

    return {"access_token": token, "token_type": "Bearer"}
61 |
@router.post(
    path='/google_token_authenticate',
    response_model=models.TokenResponse,
    responses={
        400: {
            "description": "Bad Request",
            "content": {
                "application/json": {
                    "example": {"detail": "error here"}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def google_token_authenticate(
    info: models.GoogleTokenAuthenticate
):
    """
    Create a JWT token to authenticate with api via a valid Google JWT.
    More information at https://docs.qwikgeo.com/authentication/#google-token-authenticate

    Verifies the Google ID token, creates a local user record on first
    sign-in, and returns an API bearer token.
    """

    # Verify the Google ID token's signature and audience against this
    # app's client id; an invalid token raises ValueError -> HTTP 400.
    try:
        user = id_token.verify_oauth2_token(info.token, requests.Request(), config.GOOGLE_CLIENT_ID)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc

    # First sign-in: create a local user whose username is the email's
    # local part. NOTE(review): two Google accounts with the same local
    # part but different domains would collide — confirm this is acceptable.
    try:
        user_obj = db_models.User(
            username=user['email'].split("@")[0],
            first_name=user['given_name'],
            last_name=user['family_name'],
            photo_url=user['picture'],
            email=user['email']
        )
        await user_obj.save()

    # Deliberate: IntegrityError means the user already exists, so we fall
    # through and issue a token for the existing account.
    except exceptions.IntegrityError:
        pass

    # Issue the API's own JWT with a configurable expiry.
    expire = datetime.utcnow() + timedelta(minutes=int(config.JWT_TOKEN_EXPIRE_IN_MINUTES))
    token = jwt.encode({
        "username": user['email'].split("@")[0],
        "exp": expire
    }, config.SECRET_KEY
    )

    return {'access_token' : token, 'token_type' : 'Bearer'}
118 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/collections/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Collections - Models"""
2 |
3 | from typing import NamedTuple, Union, Literal, Optional, List
4 | from pydantic import BaseModel, Field
5 | from typing_extensions import Annotated
6 |
class AddColumn(BaseModel):
    """Model for adding a column to a table"""

    column_name: str
    # Allowed PostgreSQL column types for user-added columns.
    column_type: Literal['text','integer','bigint','double precision','boolean','time','uuid']

class DeleteColumn(BaseModel):
    """Model for deleting a column from a table"""

    column_name: str

# Longitude value constrained to the open interval (-180, 180).
# NOTE(review): gt/lt are exclusive, so exactly ±180 is rejected — confirm
# whether the antimeridian should be accepted (ge/le instead).
LonField = Annotated[
    Union[float, int],
    Field(
        title='Coordinate longitude',
        gt=-180,
        lt=180,
    ),
]

# Latitude value constrained to the open interval (-90, 90).
# NOTE(review): exactly ±90 (the poles) is rejected — confirm intent.
LatField = Annotated[
    Union[float, int],
    Field(
        title='Coordinate latitude',
        gt=-90,
        lt=90,
    ),
]

class Coordinates(NamedTuple):
    """A single (lon, lat) coordinate pair."""

    lon: LonField
    lat: LatField

class GeojsonGeometry(BaseModel):
    """Model for geojson geometry"""

    type: Literal['Point','MultiPoint','LineString','MultiLineString','Polygon','MultiPolygon']
    # NOTE(review): typed as a single (lon, lat) pair, but MultiPoint/
    # LineString/Polygon geometries carry nested coordinate arrays — this
    # will likely reject valid GeoJSON for those types; verify with callers.
    coordinates: Coordinates


class Geojson(BaseModel):
    """A single GeoJSON feature."""

    type: Literal['Feature']
    geometry: GeojsonGeometry
    # Free-form property bag; no schema is enforced here.
    properties: object
    id: Optional[int]
56 |
class AggregateModel(BaseModel):
    """Model for aggregating data on a numerical column for a table"""

    # Aggregation applied to `column`.
    type: Literal['distinct', 'avg', 'count', 'sum', 'max', 'min']=None
    column: str
    # Optional column to group by, and how values are aggregated per group.
    group_column: Optional[str]
    group_method: Optional[str]


class StatisticsModel(BaseModel):
    """Model for performing statistics on a numerical column for a table"""

    coordinates: str = Field(
        default=None, title="A list of coordinates to perform statistics in a certain geographical area."
    )
    geometry_type: Literal['POINT', 'LINESTRING', 'POLYGON']=None
    # PostGIS predicate used to relate features to the supplied geometry.
    spatial_relationship: Literal['ST_Intersects', 'ST_Crosses', 'ST_Within', 'ST_Contains', 'ST_Overlaps', 'ST_Disjoint', 'ST_Touches']=None
    aggregate_columns: List[AggregateModel]
    # Optional SQL-style filter expression applied to the table.
    filter: str=None

class BinsModel(BaseModel):
    """Model for creating bins on a numerical column for a table"""

    coordinates: str = Field(
        default=None, title="A list of coordinates to perform statistics in a certain geographical area."
    )
    geometry_type: Literal['POINT', 'LINESTRING', 'POLYGON']=None
    spatial_relationship: Literal['ST_Intersects', 'ST_Crosses', 'ST_Within', 'ST_Contains', 'ST_Overlaps', 'ST_Disjoint', 'ST_Touches']=None
    filter: str=None
    number_of_bins: int=10
    column: str

class NumericBreaksModel(BaseModel):
    """Model for creating numerical breaks on a numerical column for a table"""

    coordinates: str = Field(
        default=None, title="A list of coordinates to perform statistics in a certain geographical area."
    )
    geometry_type: Literal['POINT', 'LINESTRING', 'POLYGON']=None
    spatial_relationship: Literal['ST_Intersects', 'ST_Crosses', 'ST_Within', 'ST_Contains', 'ST_Overlaps', 'ST_Disjoint', 'ST_Touches']=None
    filter: str=None
    number_of_breaks: int
    column: str
    # Classification scheme used to compute the break points.
    break_type: Literal['equal_interval', 'head_tail', 'quantile', 'jenk']

class BinModel(BaseModel):
    """A single bin: lower and upper bound pair."""

    min: float
    max: float

class CustomBreaksModel(BaseModel):
    """Model for creating custom breaks on a numerical column for a table"""

    coordinates: str = Field(
        default=None, title="A list of coordinates to perform statistics in a certain geographical area."
    )
    geometry_type: Literal['POINT', 'LINESTRING', 'POLYGON']=None
    spatial_relationship: Literal['ST_Intersects', 'ST_Crosses', 'ST_Within', 'ST_Contains', 'ST_Overlaps', 'ST_Disjoint', 'ST_Touches']=None
    filter: str=None
    column: str
    # Caller-supplied bin boundaries.
    breaks: List[BinModel]
--------------------------------------------------------------------------------
/qwikgeo_api/routers/imports/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Imports - Models"""
2 |
3 | from pydantic import BaseModel, Field
4 |
class BaseResponseModel(BaseModel):
    """Response returned when an import job is accepted.

    The defaults below serve as example values in the OpenAPI docs.
    """

    # Identifier of the queued import process.
    process_id: str = Field(
        default="472e29dc-91a8-41d3-b05f-cee34006e3f7"
    )
    # Polling URL for the status of the queued process.
    url: str = Field(
        default="https://api.qwikgeo.com/api/v1/analysis/status/472e29dc-91a8-41d3-b05f-cee34006e3f7"
    )

class ArcgisModel(BaseModel):
    """Model for importing arcgis data"""

    url: str = Field(
        title="The url that contains the service to download."
    )
    token: str = Field(
        default=None, title="If endpoint is authenticated, token will be used to download the service."
    )
    # ArcGIS `where` clause; "1=1" selects every feature.
    filter: str = Field(
        default="1=1", title="Add a filter to limit results back from a service."
    )
    title: str = Field(
        title="The name of the dataset within GeoPortal."
    )
    # NOTE(review): mutable list defaults are safe under pydantic (defaults
    # are copied per instance), unlike plain Python function defaults.
    tags: list=[]
    description: str = Field(
        title="A description about the dataset.",
        default=""
    )
    read_access_list: list=[]
    write_access_list: list=[]
    searchable: bool=True

class PointJsonUrl(BaseModel):
    """Model for importing json data with point data"""

    # Names of the latitude/longitude fields in the source data.
    latitude: str
    longitude: str
    table_columns: list
    url: str
    title: str = Field(
        title="The name of the dataset within GeoPortal."
    )
    tags: list=[]
    description: str = Field(
        title="A description about the dataset.",
        default=""
    )
    read_access_list: list=[]
    write_access_list: list=[]
    searchable: bool=True

class GeographicJsonUrl(BaseModel):
    """Model for importing json data with geographic boundaries"""

    # Boundary table to join against and its join column.
    map_name: str
    map_column: str
    map_columns: list
    table_columns: list
    # Column in the uploaded data matched against map_column.
    table_column: str
    url: str
    title: str = Field(
        title="The name of the dataset within GeoPortal."
    )
    tags: list=[]
    description: str = Field(
        title="A description about the dataset.",
        default=""
    )
    read_access_list: list=[]
    write_access_list: list=[]
    searchable: bool=True

class GeojsonUrl(BaseModel):
    """Model for importing geojson data from a url"""

    url: str
    title: str = Field(
        title="The name of the dataset within GeoPortal."
    )
    tags: list=[]
    description: str = Field(
        title="A description about the dataset.",
        default=""
    )
    read_access_list: list=[]
    write_access_list: list=[]
    searchable: bool=True
94 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/imports/utilities.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Import Utilities"""
2 |
3 | import os
4 | import json
5 |
6 | import datetime
7 | import subprocess
8 | from fastapi import FastAPI
9 | import aiohttp
10 | import pandas as pd
11 |
12 | from qwikgeo_api import utilities
13 | from qwikgeo_api import config
14 |
# In-memory registry of background import jobs, keyed by process_id.
# NOTE(review): state lives only in this worker's memory — presumably the
# API runs as a single process; verify before scaling to multiple workers.
import_processes = {}
16 |
async def upload_csv_to_db_with_latitude_and_longitude(
    file_path: str,
    new_table_id: str,
    latitude: str,
    longitude: str,
    table_columns: list,
    app: FastAPI
) -> None:
    """
    Upload data from a csv file with latitude and longitude columns into db.

    Creates user_data."new_table_id" with columns typed from the csv's
    pandas dtypes, bulk-loads the rows via COPY, then adds and populates a
    4326 POINT geometry column from the latitude/longitude columns.

    Parameters:
        file_path: Path of the csv file to load.
        new_table_id: Name of the table to create in the user_data schema.
        latitude: Name of the csv column holding the point latitude.
        longitude: Name of the csv column holding the point longitude.
        table_columns: Columns of the csv (unused here; kept for interface
            compatibility with existing callers).
        app: FastAPI instance whose state holds the asyncpg pool.
    """

    pd.options.display.max_rows = 10

    data_frame = pd.read_csv(file_path)

    columns = ""

    create_table_sql = f"""CREATE TABLE user_data."{new_table_id}" ("""

    # Map pandas dtypes onto PostgreSQL column types.
    # DataFrame.iteritems() was removed in pandas 2.0; .items() is the
    # supported spelling and behaves identically.
    for name, data_type in data_frame.dtypes.items():
        clean_name = utilities.remove_bad_characters(name)
        columns += f"{clean_name},"
        create_table_sql += f'"{clean_name}"'
        if data_type == "object" or data_type == "datetime64":
            create_table_sql += " text,"
        elif data_type == "int64":
            create_table_sql += " integer,"
        elif data_type == "float64":
            create_table_sql += " double precision,"

    # Drop the trailing commas left by the loop above.
    create_table_sql = create_table_sql[:-1]

    columns = columns[:-1]

    create_table_sql += ");"

    pool = app.state.database

    # NOTE(review): table/column names and file_path are interpolated into
    # SQL; they are assumed to be sanitized upstream
    # (utilities.remove_bad_characters / generated table ids) — confirm.
    async with pool.acquire() as con:
        await con.fetch(f"""DROP TABLE IF EXISTS user_data."{new_table_id}";""")

        await con.fetch(create_table_sql)

        # COPY reads the file server-side: the database must share the
        # filesystem with this process.
        insert_sql = f"""COPY user_data."{new_table_id}"({columns})
        FROM '{file_path}'
        DELIMITER ','
        CSV HEADER;"""

        await con.fetch(insert_sql)

        add_geom_sql = f"""
            SELECT AddGeometryColumn ('user_data','{new_table_id}','geom',4326,'POINT',2);
        """

        await con.fetch(add_geom_sql)

        update_geom_sql = f"""
            UPDATE user_data."{new_table_id}"
            SET geom = ST_SetSRID(ST_MakePoint({longitude},{latitude}), 4326);
        """

        await con.fetch(update_geom_sql)

        await clean_up_table(
            table_id=new_table_id,
            app=app
        )

    # Remove any uploaded media files associated with this table id.
    media_directory = os.listdir(f"{os.getcwd()}/media/")
    for file in media_directory:
        if new_table_id in file:
            os.remove(f"{os.getcwd()}/media/{file}")
97 |
async def upload_csv_to_db_with_geographic_data(
    file_path: str,
    new_table_id: str,
    map_name: str,
    map_column: str,
    table_column: str,
    table_columns: list,
    map_columns: list,
    app: FastAPI
) -> None:
    """
    Upload data from a csv file with geographic data into db.

    Loads the csv into a temporary table, then joins it against the
    boundary table user_data."map_name" (csv "table_column" matched to map
    "map_column") to produce user_data."new_table_id" with geometry.

    Parameters:
        file_path: Path of the csv file to load.
        new_table_id: Name of the table to create in the user_data schema.
        map_name: Boundary table providing the geometry.
        map_column: Join column on the boundary table.
        table_column: Join column in the csv data.
        table_columns: All csv columns.
        map_columns: Boundary-table columns to carry into the result.
        app: FastAPI instance whose state holds the asyncpg pool.
    """

    pd.options.display.max_rows = 10

    data_frame = pd.read_csv(file_path)

    table_column = utilities.remove_bad_characters(table_column)

    columns = ""

    formatted_table_columns = ""

    formatted_map_columns = ""

    # Build the SELECT lists for the join below. The trailing commas are
    # intentional: the join SQL appends "geom" after these fragments.
    for col in table_columns:
        if col not in map_columns:
            formatted_table_columns += f"a.{utilities.remove_bad_characters(col)},"

    for column in map_columns:
        formatted_map_columns += f"b.{utilities.remove_bad_characters(column)},"

    create_table_sql = f"""CREATE TABLE user_data."{new_table_id}_temp" ("""

    # Map pandas dtypes onto PostgreSQL column types; the join column is
    # forced to text so it can match the boundary table's key.
    # DataFrame.iteritems() was removed in pandas 2.0; .items() is the
    # supported spelling and behaves identically.
    for name, data_type in data_frame.dtypes.items():
        clean_name = utilities.remove_bad_characters(name)
        columns += f"{clean_name},"
        create_table_sql += f'"{clean_name}"'
        if clean_name == table_column:
            create_table_sql += " text,"
        elif data_type == "object" or data_type == "datetime64":
            create_table_sql += " text,"
        elif data_type == "int64":
            create_table_sql += " integer,"
        elif data_type == "float64":
            create_table_sql += " double precision,"

    # Drop the trailing commas left by the loops above.
    create_table_sql = create_table_sql[:-1]
    columns = columns[:-1]

    create_table_sql += ");"

    pool = app.state.database

    async with pool.acquire() as con:
        await con.fetch(f"""DROP TABLE IF EXISTS user_data."{new_table_id}_temp";""")

        await con.fetch(create_table_sql)

        # COPY reads the file server-side: the database must share the
        # filesystem with this process.
        insert_sql = f"""COPY user_data."{new_table_id}_temp" ({columns})
        FROM '{file_path}'
        DELIMITER ','
        CSV HEADER;"""

        await con.fetch(insert_sql)

        join_sql = f"""CREATE TABLE user_data."{new_table_id}" AS
            SELECT {formatted_table_columns} {formatted_map_columns} geom
            FROM user_data."{new_table_id}_temp" as a
            LEFT JOIN user_data."{map_name}" as b
            ON a."{table_column}" = b."{map_column}";
        """

        await con.fetch(join_sql)

        await con.fetch(f"""DROP TABLE IF EXISTS user_data."{new_table_id}_temp";""")

        await clean_up_table(
            table_id=new_table_id,
            app=app
        )

    # Remove any uploaded media files associated with this table id.
    media_directory = os.listdir(f"{os.getcwd()}/media/")
    for file in media_directory:
        if new_table_id in file:
            os.remove(f"{os.getcwd()}/media/{file}")
186 |
async def validate_table(
    table_id: str,
    app: FastAPI
) -> bool:
    """
    Return True when user_data."table_id" exists and holds at least one row.

    Parameters:
        table_id: Table name to check within the user_data schema.
        app: FastAPI instance whose state holds the asyncpg pool.
    """

    pool = app.state.database

    async with pool.acquire() as con:
        # Parameterized ($1) so the caller-supplied name is never
        # interpolated into the query text.
        exists = await con.fetchrow("""
            SELECT EXISTS (
                SELECT FROM
                    pg_tables
                WHERE
                    schemaname = 'user_data' AND
                    tablename = $1
            );
        """, table_id)

        if exists['exists']:
            # Identifiers cannot be bound as parameters; quote the name
            # instead (it was just confirmed to exist in pg_tables).
            count = await con.fetchrow(f"""
                SELECT COUNT(*)
                FROM user_data."{table_id}"
            """)

            if count['count'] > 0:
                return True

    return False
214 |
215 |
async def get_arcgis_data(
    url: str,
    table_id: str,
    process_id: str,
    username: str,
    title: str,
    tags: list,
    description: str,
    read_access_list: list,
    write_access_list: list,
    searchable: bool,
    app: FastAPI,
    token: str=None,
    filter: str="1=1"
) -> None:
    """
    Get arcgis data from a given url and load it into a database.

    Downloads all features matching `filter` from the ArcGIS feature
    service at `url` (paginating when the service's maxRecordCount is
    exceeded), writes them to "{table_id}.geojson", loads that file into
    the database, registers a Table item, and records the outcome in the
    shared import_processes registry under `process_id`.

    Note: `filter` shadows the builtin but is part of the public
    interface, so it is kept as-is.
    """

    start = datetime.datetime.now()

    try:
        service_url = f"{url}?f=json"

        if token is not None:
            service_url += f"&token={token}"

        async with aiohttp.ClientSession() as session:

            # First request: service metadata, for the page-size limit.
            async with session.get(service_url) as resp:

                data = await resp.json()

                max_number_of_features_per_query = data['maxRecordCount']

                # Second request: only the object ids, to size the download.
                feature_stats_url = f"{url}/query?where={filter}&returnGeometry=false&returnIdsOnly=true&f=json"

                async with session.get(feature_stats_url) as feature_resp:

                    data = await feature_resp.text()

                    data = json.loads(data)

                    object_ids = data['objectIds']

                    number_of_features = len(data['objectIds'])

                error = ""

                if number_of_features <= max_number_of_features_per_query:

                    # Everything fits in one request: download directly
                    # as GeoJSON.
                    async with session.get(f"{url}/query?where={filter}&outFields=*&returnGeometry=true&geometryPrecision=6&outSR=4326&f=geojson") as resp:

                        data = await resp.json()

                        if 'error' in data:
                            error = data['error']

                        with open(f'{table_id}.geojson', 'w') as json_file:
                            json.dump(data, json_file)

                else:
                    # Paginate: request maxRecordCount object ids at a time
                    # and accumulate the features into one collection.
                    start_counter = 0

                    feature_collection = {
                        "type": "FeatureCollection",
                        "features": []
                    }

                    for x in range(
                        start_counter,
                        number_of_features,
                        max_number_of_features_per_query
                    ):
                        ids_requested = object_ids[x: x + max_number_of_features_per_query ]
                        payload = {
                            'f': 'geojson',
                            'where': filter,
                            # str(list)[1:-1] renders "1, 2, 3" for the
                            # comma-separated objectIds parameter.
                            'objectIds': str( ids_requested )[1:-1],
                            'outSR': '4326',
                            'returnGeometry': 'true',
                            'outFields': '*',
                            'geometryPrecision': '4'
                        }

                        async with session.post( f"{url}/query", data=payload ) as resp:

                            data = await resp.json()

                            if 'error' in data:
                                error = data['error']

                            feature_collection['features'] += data['features']

                    with open(f'{table_id}.geojson', 'w') as json_file:
                        json.dump(feature_collection, json_file)

                # NOTE(review): "{table_id}.geojson" is only deleted on the
                # exception path below — the success path leaves it on disk;
                # confirm whether that is intentional.
                load_geographic_data_to_server(
                    table_id=table_id,
                    file_path=f'{table_id}.geojson'
                )

                valid_table = await validate_table(
                    table_id=table_id,
                    app=app
                )

                if valid_table:

                    await clean_up_table(
                        table_id=table_id,
                        app=app
                    )

                    # NOTE(review): sibling importers in this module use the
                    # key "username" here instead of "user_id" — confirm
                    # which one the Table model actually expects.
                    item = {
                        "user_id": username,
                        "table_id": table_id,
                        "title": title,
                        "tags": tags,
                        "description": description,
                        "read_access_list": read_access_list,
                        "write_access_list": write_access_list,
                        "searchable": searchable
                    }

                    await utilities.create_single_item_in_database(
                        item=item,
                        model_name="Table"
                    )

                    import_processes[process_id]['status'] = "SUCCESS"
                    import_processes[process_id]['table_id'] = table_id
                else:
                    import_processes[process_id]['status'] = "FAILURE"
                    import_processes[process_id]['error'] = f"No data within ArcGIS Service. Error: {str(error)}"
                # Recorded for both success and failure outcomes.
                import_processes[process_id]['completion_time'] = datetime.datetime.now()
                import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
    except Exception as error:
        # On any failure, remove the partial download and record the error.
        if os.path.exists(f'{table_id}.geojson'):
            os.remove(f'{table_id}.geojson')
        import_processes[process_id]['status'] = "FAILURE"
        import_processes[process_id]['error'] = str(error)
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
361 |
async def upload_geographic_file(
    file_path: str,
    new_table_id: str,
    process_id: str,
    username: str,
    title: str,
    tags: list,
    description: str,
    read_access_list: list,
    write_access_list: list,
    searchable: bool,
    app: FastAPI
) -> None:
    """
    Upload data from a geographic file.

    Loads the file at `file_path` into the database as
    user_data."new_table_id", registers a Table item for it, and records
    the outcome in the shared import_processes registry under `process_id`.
    """

    start = datetime.datetime.now()

    try:
        load_geographic_data_to_server(
            table_id=new_table_id,
            file_path=file_path
        )

        # Confirm the table exists and contains at least one row.
        valid_table = await validate_table(
            table_id=new_table_id,
            app=app
        )

        if valid_table:

            await clean_up_table(
                table_id=new_table_id,
                app=app
            )

            item = {
                "username": username,
                "table_id": new_table_id,
                "title": title,
                "tags": tags,
                "description": description,
                "read_access_list": read_access_list,
                "write_access_list": write_access_list,
                "searchable": searchable
            }

            await utilities.create_single_item_in_database(
                item=item,
                model_name="Table"
            )

            import_processes[process_id]['status'] = "SUCCESS"
            import_processes[process_id]['new_table_id'] = new_table_id
        else:
            import_processes[process_id]['status'] = "FAILURE"
            import_processes[process_id]['error'] = "No data within files loaded."
        # Recorded for both success and failure outcomes.
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
    except Exception as error:
        # On failure, remove any uploaded media files for this table id
        # and record the error for status polling.
        media_directory = os.listdir(f"{os.getcwd()}/media/")
        for file in media_directory:
            if new_table_id in file:
                os.remove(f"{os.getcwd()}/media/{file}")
        import_processes[process_id]['status'] = "FAILURE"
        import_processes[process_id]['error'] = str(error)
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
432 |
async def import_geographic_data_from_csv(
    file_path: str,
    new_table_id: str,
    process_id: str,
    map_name: str,
    map_column: str,
    table_column: str,
    table_columns: list,
    map_columns: list,
    username: str,
    title: str,
    tags: list,
    description: str,
    read_access_list: list,
    write_access_list: list,
    searchable: bool,
    app: FastAPI
) -> None:
    """
    Upload data from a csv file with geographic data.

    Wraps upload_csv_to_db_with_geographic_data, registers a Table item
    for the new table, and records the outcome in the shared
    import_processes registry under `process_id`.
    """

    start = datetime.datetime.now()

    try:
        await upload_csv_to_db_with_geographic_data(
            file_path=file_path,
            new_table_id=new_table_id,
            map_name=map_name,
            map_column=map_column,
            table_column=table_column,
            table_columns=table_columns,
            map_columns=map_columns,
            app=app
        )

        item = {
            "username": username,
            "table_id": new_table_id,
            "title": title,
            "tags": tags,
            "description": description,
            "read_access_list": read_access_list,
            "write_access_list": write_access_list,
            "searchable": searchable
        }

        await utilities.create_single_item_in_database(
            item=item,
            model_name="Table"
        )
        import_processes[process_id]['status'] = "SUCCESS"
        import_processes[process_id]['new_table_id'] = new_table_id
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
    except Exception as error:
        # On failure, remove any uploaded media files for this table id
        # and record the error for status polling.
        media_directory = os.listdir(f"{os.getcwd()}/media/")
        for file in media_directory:
            if new_table_id in file:
                os.remove(f"{os.getcwd()}/media/{file}")
        import_processes[process_id]['status'] = "FAILURE"
        import_processes[process_id]['error'] = str(error)
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start

async def import_point_data_from_csv(
    file_path: str,
    new_table_id: str,
    process_id: str,
    latitude: str,
    longitude: str,
    table_columns: list,
    username: str,
    title: str,
    tags: list,
    description: str,
    read_access_list: list,
    write_access_list: list,
    searchable: bool,
    app: FastAPI
) -> None:
    """
    Upload data from a csv with lat lng columns.

    Wraps upload_csv_to_db_with_latitude_and_longitude, registers a Table
    item for the new table, and records the outcome in the shared
    import_processes registry under `process_id`.
    """

    start = datetime.datetime.now()

    try:
        await upload_csv_to_db_with_latitude_and_longitude(
            file_path=file_path,
            new_table_id=new_table_id,
            latitude=latitude,
            longitude=longitude,
            table_columns=table_columns,
            app=app
        )

        item = {
            "username": username,
            "table_id": new_table_id,
            "title": title,
            "tags": tags,
            "description": description,
            "read_access_list": read_access_list,
            "write_access_list": write_access_list,
            "searchable": searchable
        }

        await utilities.create_single_item_in_database(
            item=item,
            model_name="Table"
        )
        import_processes[process_id]['status'] = "SUCCESS"
        import_processes[process_id]['new_table_id'] = new_table_id
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
    except Exception as error:
        # On failure, remove any uploaded media files for this table id
        # and record the error for status polling.
        media_directory = os.listdir(f"{os.getcwd()}/media/")
        for file in media_directory:
            if new_table_id in file:
                os.remove(f"{os.getcwd()}/media/{file}")
        import_processes[process_id]['status'] = "FAILURE"
        import_processes[process_id]['error'] = str(error)
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
560 |
async def import_point_data_from_json_file(
    file_path: str,
    new_table_id: str,
    process_id: str,
    latitude: str,
    longitude: str,
    table_columns: list,
    username: str,
    title: str,
    tags: list,
    description: str,
    read_access_list: list,
    write_access_list: list,
    searchable: bool,
    app: FastAPI
) -> None:
    """
    Background task: load a JSON file containing latitude/longitude columns.

    The JSON is flattened to a CSV in the media folder and then ingested via
    upload_csv_to_db_with_latitude_and_longitude. Progress, timing and errors
    are recorded in the module-level import_processes entry for process_id.
    """

    start = datetime.datetime.now()

    try:
        # Convert the uploaded JSON to a flat CSV so the CSV loader can
        # ingest it. Assumes the JSON is tabular (records/columns) —
        # TODO confirm against the upload endpoint.
        df = pd.read_json(file_path)

        df.to_csv(f"{os.getcwd()}/media/{new_table_id}.csv", index=False, sep=',', encoding="utf-8")

        await upload_csv_to_db_with_latitude_and_longitude(
            file_path=f"{os.getcwd()}/media/{new_table_id}.csv",
            new_table_id=new_table_id,
            latitude=latitude,
            longitude=longitude,
            table_columns=table_columns,
            app=app
        )

        # Register the uploaded dataset as a portal Table item.
        item = {
            "username": username,
            "table_id": new_table_id,
            "title": title,
            "tags": tags,
            "description": description,
            "read_access_list": read_access_list,
            "write_access_list": write_access_list,
            "searchable": searchable
        }

        await utilities.create_single_item_in_database(
            item=item,
            model_name="Table"
        )
        import_processes[process_id]['status'] = "SUCCESS"
        import_processes[process_id]['new_table_id'] = new_table_id
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
    except Exception as error:
        # On failure, delete any staged files named after this table.
        media_directory = os.listdir(f"{os.getcwd()}/media/")
        for file in media_directory:
            if new_table_id in file:
                os.remove(f"{os.getcwd()}/media/{file}")
        import_processes[process_id]['status'] = "FAILURE"
        import_processes[process_id]['error'] = str(error)
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
626 |
async def import_geographic_data_from_json_file(
    file_path: str,
    new_table_id: str,
    process_id: str,
    map_name: str,
    map_column: str,
    table_column: str,
    table_columns: list,
    map_columns: list,
    username: str,
    title: str,
    tags: list,
    description: str,
    read_access_list: list,
    write_access_list: list,
    searchable: bool,
    app: FastAPI
) -> None:
    """
    Background task: load a JSON file and join it to an existing geographic
    reference map (map_name, joined on table_column -> map_column).

    The JSON is flattened to a CSV in the media folder and then ingested via
    upload_csv_to_db_with_geographic_data. Progress, timing and errors are
    recorded in the module-level import_processes entry for process_id.
    """

    start = datetime.datetime.now()

    try:
        # Convert the uploaded JSON to a flat CSV so the CSV loader can
        # ingest it. Assumes the JSON is tabular — TODO confirm.
        df = pd.read_json(file_path)

        df.to_csv(f"{os.getcwd()}/media/{new_table_id}.csv", index=False, sep=',', encoding="utf-8")

        await upload_csv_to_db_with_geographic_data(
            file_path=f"{os.getcwd()}/media/{new_table_id}.csv",
            new_table_id=new_table_id,
            map_name=map_name,
            map_column=map_column,
            table_column=table_column,
            table_columns=table_columns,
            map_columns=map_columns,
            app=app
        )

        # Register the uploaded dataset as a portal Table item.
        item = {
            "username": username,
            "table_id": new_table_id,
            "title": title,
            "tags": tags,
            "description": description,
            "read_access_list": read_access_list,
            "write_access_list": write_access_list,
            "searchable": searchable
        }

        await utilities.create_single_item_in_database(
            item=item,
            model_name="Table"
        )
        import_processes[process_id]['status'] = "SUCCESS"
        import_processes[process_id]['new_table_id'] = new_table_id
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
    except Exception as error:
        # On failure, delete any staged files named after this table.
        media_directory = os.listdir(f"{os.getcwd()}/media/")
        for file in media_directory:
            if new_table_id in file:
                os.remove(f"{os.getcwd()}/media/{file}")
        import_processes[process_id]['status'] = "FAILURE"
        import_processes[process_id]['error'] = str(error)
        import_processes[process_id]['completion_time'] = datetime.datetime.now()
        import_processes[process_id]['run_time_in_seconds'] = datetime.datetime.now()-start
696 |
def load_geographic_data_to_server(
    table_id: str,
    file_path: str
) -> None:
    """
    Load a geographic file into the database using ogr2ogr.

    The data lands in user_data.{table_id} with a `geom` geometry column and
    a `gid` FID, promoting geometries to their Multi* variants. Any staged
    files in the media folder whose names contain table_id are removed
    afterwards.

    :param table_id: target table name inside the user_data schema
    :param file_path: path of the geographic file to import
    """
    host = config.DB_HOST
    username = config.DB_USERNAME
    password = config.DB_PASSWORD
    database = config.DB_DATABASE

    # Pass ogr2ogr an explicit argument list instead of a shell=True string:
    # the file path and credentials are then delivered as single arguments,
    # which prevents shell injection and word-splitting on spaces/quotes.
    connection = (
        f"PG:host={host} user={username} dbname={database} "
        f"password={password} port={config.DB_PORT}"
    )
    subprocess.call([
        "ogr2ogr",
        "-f", "PostgreSQL",
        connection,
        file_path,
        "-lco", "GEOMETRY_NAME=geom",
        "-lco", "FID=gid",
        "-nlt", "PROMOTE_TO_MULTI",
        "-lco", "PRECISION=no",
        "-nln", f"user_data.{table_id}",
        "-overwrite"
    ])
    # NOTE(review): the exit code is deliberately ignored (matching the
    # original best-effort behavior) — confirm whether callers should be
    # told when ogr2ogr fails.

    # Clean up any staged upload files for this table.
    media_directory = os.listdir(f"{os.getcwd()}/media/")
    for file in media_directory:
        if table_id in file:
            os.remove(f"{os.getcwd()}/media/{file}")
async def clean_up_table(
    table_id: str,
    app: FastAPI
):
    """
    Post-upload maintenance for an imported table.

    Removes rows with NULL geometry, builds a spatial index and clusters the
    table on it, resets the `gid` surrogate key, repairs invalid geometries
    and finally vacuums the table. table_id is interpolated into SQL
    directly; it is presumably an internally generated identifier rather
    than raw user input — TODO confirm at the call sites.
    """


    pool = app.state.database

    async with pool.acquire() as con:

        # Drop features that arrived without a geometry.
        await con.fetch(f"""
            DELETE FROM user_data.{table_id}
            WHERE geom IS NULL;
        """)

        # GiST spatial index so bbox/tile queries are fast.
        await con.fetch(f"""
            CREATE INDEX {table_id}_geom_idx
            ON user_data.{table_id}
            USING GIST (geom);
        """)

        # Physically reorder rows by the spatial index for locality.
        await con.fetch(f"""
            CLUSTER user_data.{table_id}
            USING {table_id}_geom_idx;
        """)

        # Replace whatever gid the loader produced with a fresh SERIAL key.
        await con.fetch(f"""
            ALTER TABLE user_data.{table_id}
            DROP COLUMN IF EXISTS gid;
        """)

        await con.fetch(f"""
            ALTER TABLE user_data.{table_id}
            ADD COLUMN gid SERIAL PRIMARY KEY;
        """)

        # Repair invalid geometries in place, keeping Multi* typing.
        await con.fetch(f"""
            UPDATE user_data.{table_id}
            SET geom = ST_Multi(ST_CollectionExtract(ST_MakeValid(geom)))
            WHERE ST_IsValid(geom) = false;
        """)

        # Refresh planner statistics and reclaim dead space.
        await con.fetch(f"""
            VACUUM ANALYZE user_data.{table_id};
        """)
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/groups/router.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Groups"""
2 |
3 | import uuid
4 | from typing import List
5 | from fastapi import APIRouter, HTTPException, Depends, status
6 | from tortoise import exceptions
7 | from tortoise.expressions import Q
8 |
9 | from qwikgeo_api import db_models
10 | from qwikgeo_api import utilities
11 | from qwikgeo_api import authentication_handler
12 |
13 | router = APIRouter()
14 |
@router.get(
    path="/",
    response_model=List[db_models.Group_Pydantic],
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def groups(
    q: str="",
    limit: int=10,
    offset: int=0,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """Return a list of all groups, optionally filtered by name substring."""

    # An empty search term means "no filter"; otherwise match group names
    # case-insensitively against q.
    query_filter = Q(name__icontains=q) if q != "" else ""

    return await utilities.get_multiple_items_in_database(
        username=username,
        model_name="Group",
        query_filter=query_filter,
        limit=limit,
        offset=offset
    )
51 |
@router.post(
    path="/",
    response_model=db_models.Group_Pydantic,
    responses={
        400: {
            "description": "Group name already exist",
            "content": {
                "application/json": {
                    "example": {"detail": "Group name already exist."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def create_group(
    group: db_models.Group_Pydantic,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Create a group.

    The requesting user must be listed in both group_users and group_admins.
    Raises 400 if a listed member username does not exist or the group name
    is already taken.
    """

    try:
        user_in_group_users = False
        user_in_group_admins = False

        # Verify every listed member exists and note whether the requester
        # is among them.
        for name in group.group_users:
            if name.username == username:
                user_in_group_users = True
            try:
                await db_models.User.get(username=name.username)
            except exceptions.DoesNotExist as exc:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f'Username: {name.username} does not exist.'
                ) from exc

        # NOTE(review): admin usernames are not existence-checked the way
        # member usernames are above — confirm whether that is intentional.
        for name in group.group_admins:
            if name.username == username:
                user_in_group_admins = True

        if user_in_group_users is False:
            raise HTTPException(status_code=400, detail="User is not in group_users.")

        if user_in_group_admins is False:
            raise HTTPException(status_code=400, detail="User is not in group_admins.")

        new_group = await db_models.Group.create(
            name=group.name
        )

        # Persist membership and admin rows for the new group.
        for name in group.group_users:
            await db_models.GroupUser.create(username=name.username, group_id_id=new_group.group_id)

        for name in group.group_admins:
            await db_models.GroupAdmin.create(username=name.username, group_id_id=new_group.group_id)

        return await db_models.Group_Pydantic.from_tortoise_orm(new_group)
    except exceptions.IntegrityError as exc:
        # Duplicate group name violates the unique constraint on Group.name.
        raise HTTPException(status_code=400, detail="Group name already exist.") from exc
118 |
@router.get(
    path="/{group_id}",
    response_model=db_models.Group_Pydantic,
    responses={
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "You do not have access to this group."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Group not found."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def get_group(
    group_id: str,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """Retrieve a group. Only members of the group may view it."""

    try:
        group = await db_models.Group_Pydantic.from_queryset_single(
            db_models.Group.get(group_id=group_id)
        )
    except exceptions.DoesNotExist as exc:
        raise HTTPException(status_code=404, detail="Group not found.") from exc

    # Membership check: the requester must appear in group_users.
    if not any(member.username == username for member in group.group_users):
        raise HTTPException(status_code=403, detail="You do not have access to this group.")

    return group
168 |
@router.put(
    path="/{group_id}",
    response_model=db_models.Group_Pydantic,
    responses={
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "You do not have access to this group."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Group not found."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def update_group(
    group_id: uuid.UUID,
    new_group: db_models.Group_Pydantic,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Update a group. Only a current group admin may update; changes the
    group name and the usernames of existing membership rows.
    """

    try:
        group = await db_models.Group_Pydantic.from_queryset_single(
            db_models.Group.get(group_id=group_id)
        )
        # Only admins may modify the group.
        access = False
        for user in group.group_admins:
            if user.username == username:
                access = True
        if access is False:
            raise HTTPException(status_code=403, detail="You do not have admin access to this group.")
        await db_models.Group.filter(group_id=group_id).update(name=new_group.name)
        # NOTE(review): this only renames existing GroupUser rows matched by
        # id; it does not add/remove members or touch group_admins —
        # confirm that is the intended update semantics.
        for name in new_group.group_users:
            await db_models.GroupUser.filter(
                id=name.id, group_id_id=group_id
            ).update(username=name.username)
        return await db_models.Group_Pydantic.from_queryset_single(
            db_models.Group.get(group_id=group_id)
        )
    except exceptions.DoesNotExist as exc:
        raise HTTPException(status_code=404, detail="Group not found.") from exc
226 |
@router.delete(
    path="/{group_id}",
    responses={
        200: {
            "description": "Successful Response",
            "content": {
                "application/json": {
                    "example": {"status": True}
                }
            }
        },
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "You do not have access to this group."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Group not found."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def delete_group(
    group_id: uuid.UUID,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """Delete a group. Only a current group admin may delete it."""

    try:
        group = await db_models.Group_Pydantic.from_queryset_single(
            db_models.Group.get(group_id=group_id)
        )
    except exceptions.DoesNotExist as exc:
        raise HTTPException(status_code=404, detail="Group not found.") from exc

    # Admin check: the requester must appear in group_admins.
    if not any(admin.username == username for admin in group.group_admins):
        raise HTTPException(status_code=403, detail="You do not have admin access to this group.")

    deleted_count = await db_models.Group.filter(group_id=group_id).delete()
    if not deleted_count:
        raise HTTPException(status_code=404, detail="Group not found")

    return {"status": True}
286 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/maps/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Maps - Models"""
2 |
3 | from enum import Enum
4 | from typing import List
5 | import uuid
6 | from pydantic import BaseModel, Extra
7 |
class BasemapsEnum(str, Enum):
    """Allowed basemap styles for a map."""

    Streets = 'streets'
    Outdoors = 'outdoors'
    Light = 'light'
    Dark = 'dark'
    Satellite = 'satellite'
    SatelliteStreets = 'satellite streets'
    Navigation = 'navigation'
16 |
class MapTypesEnum(str, Enum):
    """Allowed source types for a map layer."""

    user_data = 'user_data'
    vector = 'vector'
    xyz = 'xyz'
    wms = 'wms'
    wmts = 'wmts'
    esri = 'esri'
24 |
class GeometryTypesEnum(str, Enum):
    """Allowed geometry categories for a map layer."""

    point = 'point'
    line = 'line'
    polygon = 'polygon'
    raster = 'raster'
30 |
class Layer(BaseModel):
    """Model describing a single layer belonging to a map."""

    # Display name and free-text description for the layer.
    title: str
    description: str
    # Mapbox layer/source identifier.
    mapbox_name: str
    map_type: MapTypesEnum
    geometry_type: GeometryTypesEnum
    # Optional rendering configuration blobs; shape is dictated by the
    # client map library, so they are left as untyped objects.
    style: object=None
    paint: object=None
    layout: object=None
    fill_paint: object=None
    border_paint: object=None
    bounding_box: list

    class Config:
        # Accept and keep any extra fields the client sends.
        extra = Extra.allow
47 |
48 |
class Map(BaseModel):
    """Model for creating/representing a map and its layers."""

    # Initial camera state.
    pitch: int
    bearing: int
    basemap: BasemapsEnum
    bounding_box: list
    layers: List[Layer]
    # Portal access-control and ownership metadata.
    read_access_list: list
    write_access_list: list
    notification_access_list: list
    username: str
    updated_username: str
    title: str
    description: str
    tags: list
    searchable: bool

    class Config:
        # Accept and keep any extra fields the client sends.
        extra = Extra.allow
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/maps/router.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Maps"""
2 |
3 | import json
4 | from typing import List
5 | from functools import reduce
6 | from fastapi import APIRouter, Depends
7 | from tortoise.expressions import Q
8 |
9 | from qwikgeo_api import utilities
10 | from qwikgeo_api import db_models
11 | from qwikgeo_api import authentication_handler
12 | import qwikgeo_api.routers.items.maps.models as models
13 |
14 | router = APIRouter()
15 |
@router.get(
    path="/",
    response_model=List[db_models.MapOut_Pydantic],
    responses={
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "No access to item."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Item does not exist."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def get_maps(
    q: str=None,
    personal :bool=False,
    limit: int=10,
    offset : int=0,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    List maps visible to the requesting user.
    More information at https://docs.qwikgeo.com/maps/#map

    NOTE(review): q, personal, limit and offset are currently ignored — the
    filtered queries below are commented out, and user_groups is fetched but
    never used. Confirm whether this is a temporary state.
    """

    user_groups = await utilities.get_user_groups(username)

    # if q:
    #     if personal:
    #         items = await utilities.get_multiple_items_in_database(
    #             username=username,
    #             model_name="MapOut",
    #             limit=limit,
    #             offset=offset,
    #             query_filter=Q(reduce(lambda x, y: x | y, [Q(item__read_access_list__contains=[group]) for group in user_groups]),Q(description__icontains=q)|Q(title__icontains=q))
    #         )
    #     else:
    #         items = await utilities.get_multiple_items_in_database(
    #             username=username,
    #             model_name="MapOut",
    #             limit=limit,
    #             offset=offset,
    #             query_filter=Q(Q(description__icontains=q)|Q(title__icontains=q))
    #         )
    # else:
    #     if personal:
    #         items = await utilities.get_multiple_items_in_database(
    #             username=username,
    #             model_name="MapOut",
    #             limit=limit,
    #             offset=offset,
    #             query_filter=Q(reduce(lambda x, y: x | y, [Q(read_access_list__icontains=group) for group in user_groups]))
    #         )
    #     else:
    #         items = await utilities.get_multiple_items_in_database(
    #             username=username,
    #             model_name="MapOut",
    #             limit=limit,
    #             offset=offset,
    #             query_filter=Q(reduce(lambda x, y: x | y, [Q(item_read_access_list__contains=[group]) for group in user_groups]))
    #         )

    items = await utilities.get_multiple_items_in_database(
        username=username,
        model_name="MapOut"
    )

    return items
101 |
@router.get(
    path="/{map_id}",
    response_model=db_models.MapOut_Pydantic,
    responses={
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "No access to item."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Item does not exist."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def get_map(
    map_id: str,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Get a map by id, enforcing the requester's read access.
    More information at https://docs.qwikgeo.com/maps/#map
    """

    # The utilities helper performs the access check and raises 403/404.
    return await utilities.get_item_in_database(
        username=username,
        model_name="MapOut",
        query_filter=Q(map_id=map_id)
    )
148 |
@router.post(
    path="/",
    response_model=db_models.MapOut_Pydantic,
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def create_map(
    item: models.Map,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Create a new map.
    More information at https://docs.qwikgeo.com/maps/#create-map
    """

    json_item = json.loads(item.json())

    # Mirror the owner into user_id for the Map database model.
    json_item['user_id'] = json_item['username']

    # Layers live in their own table, so strip them from the map payload
    # and create them separately below.
    layers = json_item['layers']

    del json_item['layers']

    new_map = await utilities.create_single_item_in_database(
        item=json_item,
        model_name="Map"
    )

    for layer in layers:
        layer['map_id'] = new_map['map_id']
        await db_models.Layer.create(**layer)

    # NOTE(review): new_map is indexed as a dict above (new_map['map_id'])
    # but accessed as an object here (new_map.portal_id) — one of the two is
    # likely wrong; confirm what create_single_item_in_database returns.
    new_item = await utilities.get_item_in_database(
        username=username,
        model_name="MapOut",
        query_filter=Q(item_id=new_map.portal_id)
    )

    return new_item
196 |
@router.put(
    path="/{map_id}",
    response_model=db_models.Map_Pydantic,
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def update_map(
    map_id: str,
    item: db_models.Map_Pydantic,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Update an existing map.
    More information at https://docs.qwikgeo.com/maps/#update-map
    """

    # Raises if the requester lacks write access to this map.
    await utilities.validate_item_access(
        model_name="Map",
        query_filter=Q(map_id=map_id),
        username=username,
        write_access=True
    )

    # map_item = db_models.Map_Pydantic(**item)

    await utilities.update_single_item_in_database(
        item=item,
        query_filter=Q(map_id=map_id),
        model_name="Map"
    )

    # Layers are replaced wholesale: delete the existing rows, then recreate
    # them from the submitted payload.
    await db_models.Layer.filter(map_id=map_id).delete()

    json_item = json.loads(item.json())

    layers = json_item['layers']

    for layer in layers:
        layer['map_id'] = map_id
        await db_models.Layer.create(**layer)

    new_item = await utilities.get_item_in_database(
        username=username,
        model_name="Map",
        query_filter=Q(map_id=map_id)
    )

    return new_item
253 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/router.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Items"""
2 |
3 | from typing import List
4 | from fastapi import APIRouter, Depends
5 | from tortoise.expressions import Q
6 |
7 | from qwikgeo_api import utilities
8 | from qwikgeo_api import db_models
9 | from qwikgeo_api import authentication_handler
10 |
11 | router = APIRouter()
12 |
@router.get(
    path="/",
    response_model=List[db_models.ItemOut_Pydantic],
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def items(
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    List all items visible to the requesting user.
    More information at https://docs.qwikgeo.com/items/#items
    """

    return await utilities.get_multiple_items_in_database(
        username=username,
        model_name="ItemOut"
    )
41 |
@router.get(
    path="/{item_id}",
    response_model=db_models.ItemOut_Pydantic,
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def item(
    item_id: str,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Get a single item by its portal id.
    More information at https://docs.qwikgeo.com/items/#item
    """

    # The utilities helper performs the access check for this user.
    return await utilities.get_item_in_database(
        username=username,
        model_name="ItemOut",
        query_filter=Q(portal_id=item_id)
    )
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/tables/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Tables - Models"""
2 |
3 | from typing import Literal, List
4 | from pydantic import BaseModel, Field
5 |
class Column(BaseModel):
    """Model for adding a column"""

    # Name of the new column; used as a quoted SQL identifier downstream.
    column_name: str
    # Restricted to a fixed set of PostgreSQL types via Literal validation.
    column_type: Literal['text','integer','bigint','double precision','boolean','time','uuid']
11 |
class CreateTable(BaseModel):
    """Model for creating a table"""

    title: str = Field(
        title="The name of the dataset within GeoPortal."
    )
    # Free-form tag strings for discovery.
    tags: list=[]
    description: str = Field(
        title="A description about the dataset.",
        default=""
    )
    # Usernames/groups granted read or write access.
    read_access_list: list=[]
    write_access_list: list=[]
    searchable: bool=True
    columns: List[Column]
    # Geometry type and SRID for the table's geometry column;
    # SRID defaults to WGS 84.
    geometry_type: Literal['POINT','LINESTRING','POLYGON']
    srid: int=4326
29 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/tables/router.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Tables"""
2 |
3 | import os
4 | import shutil
5 | from typing import List
6 | from fastapi import APIRouter, Request, Depends
7 | from tortoise.expressions import Q
8 |
9 | import qwikgeo_api.routers.items.tables.models as models
10 | from qwikgeo_api import utilities
11 | from qwikgeo_api import db_models
12 | from qwikgeo_api import authentication_handler
13 |
14 | router = APIRouter()
15 |
@router.get(
    path="/",
    response_model=List[db_models.TableOut_Pydantic],
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def tables(
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    List all tables visible to the requesting user.
    More information at https://docs.qwikgeo.com/tables/#tables
    """

    # Stray debug print(items) removed — it leaked response payloads into
    # the server log on every request.
    items = await utilities.get_multiple_items_in_database(
        username=username,
        model_name="TableOut"
    )

    return items
46 |
@router.get(
    path="/{table_id}",
    response_model=db_models.TableOut_Pydantic,
    responses={
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "No access to table."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Table does not exist."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def table(
    table_id: str,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Get a single table by id, enforcing the requester's access.
    More information at https://docs.qwikgeo.com/tables/#table
    """

    # Raises if the user cannot read this table.
    await utilities.validate_item_access(
        model_name="Table",
        query_filter=Q(table_id=table_id),
        username=username
    )

    return await utilities.get_item_in_database(
        username=username,
        model_name="TableOut",
        query_filter=Q(table_id=table_id)
    )
99 |
100 |
101 |
@router.post(
    path="/",
    responses={
        200: {
            "description": "Successful Response",
            "content": {
                "application/json": {
                    "example": {
                        "status": True,
                        "table_id": "string"
                    }
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def create_table(
    request: Request,
    info: models.CreateTable,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Create a new table.
    More information at https://docs.qwikgeo.com/tables/#create-table

    Validates the access lists before any DDL runs, creates the physical
    table in the user_data schema, adds its geometry column, and registers
    the table as a portal item.
    """

    # Validate access lists up front so a failure here cannot leave an
    # orphaned physical table behind (previously these checks ran after
    # the CREATE TABLE).
    utilities.check_if_username_in_access_list(username, info.read_access_list, "read")

    utilities.check_if_username_in_access_list(username, info.write_access_list, "write")

    pool = request.app.state.database

    new_table_id = utilities.get_new_table_id()

    async with pool.acquire() as con:

        # column_type, geometry_type and srid are constrained by the
        # CreateTable model (Literal/int), so only column names are
        # free-form user input. Double any embedded double quote so a name
        # cannot break out of the quoted identifier (SQL-injection
        # hardening).
        query = f"""
            CREATE TABLE user_data."{new_table_id}"(
                gid SERIAL PRIMARY KEY
        """

        for column in info.columns:
            safe_column_name = column.column_name.replace('"', '""')
            query += f""", "{safe_column_name}" {column.column_type} """

        query += ")"

        await con.fetch(query)

        # Attach the PostGIS geometry column in the requested SRID/type.
        geom_query = f"""
            SELECT AddGeometryColumn ('user_data','{new_table_id}','geom',{info.srid},'{info.geometry_type}',2);
        """

        await con.fetch(geom_query)

    # Register the new table as a portal item for discovery/access control.
    item = {
        "username": username,
        "table_id": new_table_id,
        "title": info.title,
        "tags": info.tags,
        "description": info.description,
        "searchable": info.searchable,
        "read_access_list": info.read_access_list,
        "write_access_list": info.write_access_list
    }

    await utilities.create_single_item_in_database(
        item=item,
        model_name="Table"
    )

    return {"status": True, "table_id": new_table_id}
181 |
@router.delete(
    path="/{table_id}",
    responses={
        200: {
            "description": "Successful Response",
            "content": {
                "application/json": {
                    "example": {"status": True}
                }
            }
        },
        403: {
            "description": "Forbidden",
            "content": {
                "application/json": {
                    "example": {"detail": "No access to table."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "Table does not exist."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def delete_table(
    request: Request,
    table_id: str,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Delete a table: its portal item, the physical table, and any tile cache.
    More information at https://docs.qwikgeo.com/tables/#delete-table
    """

    # Remove the portal item first; this helper also enforces permissions.
    await utilities.delete_single_item_in_database(
        username=username,
        model_name="Table",
        query_filter=Q(table_id=table_id)
    )

    pool = request.app.state.database

    # Drop the physical table backing this dataset.
    async with pool.acquire() as con:

        query = f"""
            DROP TABLE IF EXISTS user_data."{table_id}";
        """

        await con.fetch(query)

    # Clear any cached tiles generated for this table.
    cache_path = f'{os.getcwd()}/cache/user_data_{table_id}'
    if os.path.exists(cache_path):
        shutil.rmtree(cache_path)

    return {"status": True}
249 |
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/users/models.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Users - Models"""
2 |
3 | from typing import List
4 | from tortoise.contrib.pydantic import pydantic_model_creator
5 | from pydantic import BaseModel
6 |
7 | from qwikgeo_api import db_models
8 |
class Status(BaseModel):
    """Model for returning a request with a message"""

    # Human-readable outcome message, e.g. "Deleted user."
    message: str
13 |
class User(BaseModel):
    """Model for listing a user in search results"""

    # Login name; used as the user's primary identifier across the API.
    username: str
    first_name: str
    last_name: str
    # Optional avatar URL. Under pydantic v1 the None default makes this
    # implicitly Optional[str].
    photo_url: str=None
21 |
# Serializers generated from the Tortoise ORM User model.
# "User": full record minus the password hash.
User_Pydantic = pydantic_model_creator(db_models.User, name="User", exclude=("password_hash", ))
# "UserIn": writable fields only (read-only/generated columns excluded).
UserIn_Pydantic = pydantic_model_creator(db_models.User, name="UserIn", exclude_readonly=True)
# "UserOut_Pydantic": public-facing shape without the password hash or the
# user's related items/tables/maps.
UserOut_Pydantic = pydantic_model_creator(db_models.User, name="UserOut_Pydantic", exclude=("password_hash", "items", "tables", "maps",))
--------------------------------------------------------------------------------
/qwikgeo_api/routers/items/users/router.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Users"""
2 |
3 | from typing import List
4 | from passlib.hash import bcrypt
5 | from fastapi import APIRouter, HTTPException, Depends
6 | from tortoise import exceptions
7 |
8 | from qwikgeo_api import db_models
9 | import qwikgeo_api.routers.items.users.models as models
10 | from qwikgeo_api import authentication_handler
11 |
12 | router = APIRouter()
13 |
@router.post(
    path="/",
    response_model=models.UserOut_Pydantic,
    responses={
        400: {
            "description": "Bad Request",
            "content": {
                "application/json": {
                    "example": {"detail": "Username already exist."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def create_user(
    user: models.UserIn_Pydantic
):
    """
    Create a new user.
    More information at https://docs.qwikgeo.com/users/#create-user

    Returns the created user (password hash excluded by the response
    model). Raises 400 when the username is already taken.
    """

    try:
        # NOTE: the inbound field is named password_hash but carries the
        # plaintext password; it is bcrypt-hashed here before storage.
        user_obj = db_models.User(
            username=user.username,
            password_hash=bcrypt.hash(user.password_hash),
            first_name=user.first_name,
            last_name=user.last_name,
            email=user.email,
        )
        await user_obj.save()
        return await models.UserOut_Pydantic.from_tortoise_orm(user_obj)
    except exceptions.IntegrityError as exc:
        # Unique constraint on username violated.
        raise HTTPException(status_code=400, detail="Username already exist.") from exc
56 |
@router.get(
    "/me",
    response_model=models.UserOut_Pydantic,
    responses={
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "User not found."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def get_user(
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Retrieve information about user.
    More information at https://docs.qwikgeo.com/users/#user

    The username comes from the verified JWT; returns the caller's own
    record (password hash excluded by the response model).
    """

    try:
        user = await models.UserOut_Pydantic.from_queryset_single(
            db_models.User.get(username=username)
        )
        return user
    except exceptions.DoesNotExist as exc:
        raise HTTPException(status_code=404, detail="User not found.") from exc
94 |
@router.put(
    path="/me",
    response_model=models.UserOut_Pydantic,
    responses={
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "User not found."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def update_user(
    user: models.UserIn_Pydantic,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Update information about user.
    More information at https://docs.qwikgeo.com/users/#update-user

    Only fields present in the request body are updated. The username
    comes from the verified JWT, so users can only update themselves.
    """

    try:
        update_data = user.dict(exclude_unset=True)

        # Bug fix: the inbound password_hash field carries the plaintext
        # password (see create_user, which bcrypt-hashes it). Previously
        # the raw value was written to the database unhashed on update.
        if "password_hash" in update_data:
            update_data["password_hash"] = bcrypt.hash(update_data["password_hash"])

        await db_models.User.filter(username=username).update(**update_data)
        return await models.UserOut_Pydantic.from_queryset_single(
            db_models.User.get(username=username)
        )
    except exceptions.DoesNotExist as exc:
        raise HTTPException(status_code=404, detail="User not found.") from exc
133 |
@router.delete(
    path="/me",
    response_model=models.Status,
    responses={
        200: {
            "description": "User deleted",
            "content": {
                "application/json": {
                    "example": {"Deleted user."}
                }
            }
        },
        404: {
            "description": "Not Found",
            "content": {
                "application/json": {
                    "example": {"detail": "User not found."}
                }
            }
        },
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def delete_user(
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Delete a user.
    More information at https://docs.qwikgeo.com/users/#delete-user

    Deletes the caller's own account (username taken from the verified
    JWT). Raises 404 when no matching row was deleted.
    """

    # filter().delete() returns the number of deleted rows; zero means the
    # user record no longer exists.
    deleted_count = await db_models.User.filter(username=username).delete()
    if not deleted_count:
        raise HTTPException(status_code=404, detail="User not found.")
    return models.Status(message="Deleted user.")
176 |
@router.get(
    path="/",
    response_model=List[models.User],
    responses={
        500: {
            "description": "Internal Server Error",
            "content": {
                "application/json": {
                    "Internal Server Error"
                }
            }
        }
    }
)
async def get_users(
    q: str,
    username: int=Depends(authentication_handler.JWTBearer())
):
    """
    Return a list of users based off of searching via username.
    More information at https://docs.qwikgeo.com/users/#user-search

    Performs a case-insensitive substring match of `q` against usernames.
    Raw ORM rows are returned; response_model=List[models.User] limits the
    serialized output to username/first_name/last_name/photo_url.
    """

    users= (
        await db_models.User.filter(username__icontains=q)
    )

    return users
205 |
--------------------------------------------------------------------------------
/qwikgeo_api/utilities.py:
--------------------------------------------------------------------------------
1 | """QwikGeo API - Utilities"""
2 |
3 | import os
4 | import json
5 | import random
6 | import re
7 | import string
8 | import uuid
9 | import datetime
10 | import subprocess
11 | import shutil
12 | from functools import reduce
13 | import jwt
14 | from fastapi.security import OAuth2PasswordBearer
15 | from fastapi import Depends, FastAPI, HTTPException, status
16 | from pygeofilter.backends.sql import to_sql_where
17 | from pygeofilter.parsers.ecql import parse
18 | import aiohttp
19 | import pandas as pd
20 | import tortoise
21 | from tortoise.query_utils import Prefetch
22 | from tortoise.expressions import Q
23 | from jwt.exceptions import ExpiredSignatureError, InvalidSignatureError, DecodeError
24 | import asyncpg
25 |
26 | from qwikgeo_api import db_models
27 | from qwikgeo_api import config
28 |
29 | import_processes = {}
30 |
31 | oauth2_scheme = OAuth2PasswordBearer(tokenUrl='token')
32 |
async def get_all_tables_from_db(
    app: FastAPI
) -> list:
    """
    Return the names of every table in the 'user_data' schema.

    The PostGIS bookkeeping table 'spatial_ref_sys' is excluded.
    """

    pool = app.state.database

    async with pool.acquire() as con:
        tables_query = """
        SELECT tablename
        FROM pg_catalog.pg_tables
        WHERE schemaname = 'user_data'
        AND tablename != 'spatial_ref_sys';
        """
        records = await con.fetch(tables_query)

    # Flatten the asyncpg records into a plain list of names.
    return [record['tablename'] for record in records]
58 |
def get_database_model_name(model_name):
    """
    Map a model name string to its Tortoise ORM model class.

    Parameters:
    - model_name: one of "Table", "TableOut", "Item", "ItemOut", "Map",
      "MapOut", "Group", "ItemReadAccessList", "ItemWriteAccessList".

    Returns the matching db_models class, or None for an unknown name
    (matching the original if/elif chain, which fell through to None).
    """

    # Dict dispatch instead of a long if/elif chain. The *Out variants
    # intentionally share the same underlying model as their base name.
    model_map = {
        "Table": db_models.Table,
        "TableOut": db_models.Table,
        "Item": db_models.Item,
        "ItemOut": db_models.Item,
        "Map": db_models.Map,
        "MapOut": db_models.Map,
        "Group": db_models.Group,
        "ItemReadAccessList": db_models.ItemReadAccessList,
        "ItemWriteAccessList": db_models.ItemWriteAccessList,
    }

    return model_map.get(model_name)
78 |
def get_database_serializer_name(model_name):
    """
    Map a model name string to its pydantic serializer class.

    Parameters:
    - model_name: one of "Table", "TableOut", "Item", "ItemOut", "Map",
      "MapOut", "Group".

    Returns the matching db_models *_Pydantic serializer, or None for an
    unknown name (matching the original if/elif chain's fall-through).
    """

    # Dict dispatch mirrors get_database_model_name for consistency.
    serializer_map = {
        "Table": db_models.Table_Pydantic,
        "TableOut": db_models.TableOut_Pydantic,
        "Item": db_models.Item_Pydantic,
        "ItemOut": db_models.ItemOut_Pydantic,
        "MapOut": db_models.MapOut_Pydantic,
        "Map": db_models.Map_Pydantic,
        "Group": db_models.Group_Pydantic,
    }

    return serializer_map.get(model_name)
94 |
async def validate_item_access(
    query_filter,
    model_name: str,
    username: str,
    write_access: bool=False,
) -> bool:
    """
    Method to validate if user has access to item in portal.

    Raises HTTPException 401 when access is denied and 404 when the item
    does not exist. Returns None on success (the -> bool annotation is
    historical; no boolean is actually returned).

    """

    database_model_name = get_database_model_name(model_name)
    database_model_serializer = get_database_serializer_name(model_name)

    try:
        if model_name in ['Item','ItemOut']:
            item = await db_models.Item_Pydantic.from_queryset_single(
                db_models.Item.get(query_filter)
            )

        else:
            # Non-Item models link back to the parent Item; resolve it to
            # read the access lists attached to the Item record.
            table = await database_model_serializer.from_queryset_single(
                database_model_name.get(query_filter)
            )

            item = await db_models.Item_Pydantic.from_queryset_single(
                db_models.Item.get(portal_id=table.item.portal_id)
            )

        # Includes the username itself plus the user's group names.
        user_groups = await get_user_groups(username)

        access = False

        write_access_list = []
        read_access_list = []

        for access_item in item.item_write_access_list:
            write_access_list.append(access_item.name)

        for access_item in item.item_read_access_list:
            read_access_list.append(access_item.name)

        # Write requests consult only the write list; read requests
        # consult only the read list (a user present solely in the write
        # list is denied read access by this if/elif structure).
        if write_access:
            if any(map(lambda v: v in user_groups, write_access_list)):
                access = True
        elif any(map(lambda v: v in user_groups, read_access_list)):
            access = True

        if access is False:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail='No access to item.'
            )
    except (
        tortoise.exceptions.ValidationError,
        tortoise.exceptions.OperationalError,
        tortoise.exceptions.DoesNotExist
    ) as exc:
        # Any lookup failure is reported uniformly as a missing item.
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail='Item does not exist.'
        ) from exc
157 |
async def get_multiple_items_in_database(
    username: str,
    model_name: str,
    query_filter="",
    limit: int=10,
    offset: int=0
) -> object:
    """
    Method to get multiple items within the database of the portal.

    Results are restricted to items whose read access list names the
    caller or one of the caller's groups, except for "Group" and "ItemOut"
    listings which are returned unrestricted. Paginated via limit/offset.

    """

    database_model_name = get_database_model_name(model_name)
    database_model_serializer = get_database_serializer_name(model_name)

    user_groups = await get_user_groups(username)

    # All read-access rows naming the user or one of their groups; the
    # reduce() ORs the per-name Q objects into a single filter.
    portal_items = await db_models.ItemReadAccessListPydantic.from_queryset(db_models.ItemReadAccessList.filter(
        reduce(lambda x, y: x | y, [Q(name=group) for group in user_groups])
    ))

    default_filter = None

    if model_name not in ["Group","ItemOut"]:

        portal_ids = []

        for portal_item in portal_items:
            portal_ids.append(portal_item.portal_id.portal_id)

        # Non-Item models reference their parent Item via item_id...
        default_filter = Q(item_id__in=portal_ids)

        if model_name == "Item":
            # ...while the Item model itself keys on portal_id.
            default_filter = Q(portal_id__in=portal_ids)

    if query_filter != "":
        if default_filter == None:
            items = await database_model_serializer.from_queryset(
                database_model_name.filter(query_filter).limit(limit).offset(offset)
            )
        else:
            items = await database_model_serializer.from_queryset(
                database_model_name.filter(default_filter, query_filter).limit(limit).offset(offset)
            )
    else:
        if default_filter == None:
            items = await database_model_serializer.from_queryset(
                database_model_name.filter().limit(limit).offset(offset)
            )
        else:
            items = await database_model_serializer.from_queryset(
                database_model_name.filter(default_filter).limit(limit).offset(offset)
            )

    return items
213 |
async def get_item_in_database(
    username: str,
    model_name: str,
    query_filter,
    write_access: bool=False
) -> object:
    """
    Method to get a single item within the database of the portal.

    Validates the caller's access first (validate_item_access raises
    401/404 on failure), increments the item's view counter, and returns
    the serialized item.

    """

    await validate_item_access(
        query_filter=query_filter,
        model_name=model_name,
        username=username,
        write_access=write_access
    )

    database_model_name = get_database_model_name(model_name)
    database_model_serializer = get_database_serializer_name(model_name)

    portal_item = await database_model_serializer.from_queryset_single(
        database_model_name.get(query_filter)
    )

    if model_name not in ['Item','ItemOut']:
        # Resolve the parent Item so its view counter can be bumped.
        item = await db_models.Item_Pydantic.from_queryset_single(
            db_models.Item.get(portal_id=portal_item.item.portal_id)
        )

        # NOTE(review): read-modify-write increment; not atomic under
        # concurrent requests — confirm whether an F-expression is wanted.
        await db_models.Item.filter(
            portal_id=portal_item.item.portal_id
        ).update(views=item.views+1)
    else:
        await db_models.Item.filter(
            query_filter
        ).update(views=portal_item.views+1)

    return portal_item
253 |
async def create_single_item_in_database(
    item,
    model_name: str
) -> object:
    """
    Method to create an item within the database of the portal.

    Creates the parent Item row, populates its read/write access lists
    (an empty list defaults to owner-only access), then creates the
    model-specific row linked via item_id. Returns the created Item.

    """

    database_model_name = get_database_model_name(model_name)

    # NOTE(review): callers visible in this repo build `item` with a
    # "username" key rather than "user_id" — confirm which key this Item
    # creation actually receives.
    db_item = await db_models.Item.create(
        user_id=item['user_id'],
        title=item['title'],
        tags=item['tags'],
        description=item['description'],
        views="1",  # NOTE(review): stored as the string "1"; presumably coerced by the ORM — verify.
        searchable=item['searchable'],
        item_type=model_name.lower()
    )

    # Empty access lists default to owner-only access.
    if item['read_access_list'] == []:
        item['read_access_list'] = [item['username']]

    if item['write_access_list'] == []:
        item['write_access_list'] = [item['username']]

    for name in item['read_access_list']:
        await db_models.ItemReadAccessList.create(name=name, portal_id_id=db_item.portal_id)

    for name in item['write_access_list']:
        await db_models.ItemWriteAccessList.create(name=name, portal_id_id=db_item.portal_id)

    # Link the model-specific row back to the Item just created.
    item['item_id'] = db_item.portal_id

    await database_model_name.create(**item)

    return db_item
292 |
async def update_single_item_in_database(
    item,
    query_filter,
    model_name: str
) -> object:
    """
    Update an item in the portal database and return its serialized form.

    `item` is a dict of column/value pairs applied to every row matching
    query_filter; the refreshed record is then fetched and serialized.
    """

    model = get_database_model_name(model_name)
    serializer = get_database_serializer_name(model_name)

    await model.filter(query_filter).update(**item)

    refreshed = model.get(query_filter)
    return await serializer.from_queryset_single(refreshed)
309 |
async def delete_single_item_in_database(
    username: str,
    query_filter,
    model_name: str
) -> object:
    """
    Method to delete an item within the database of the portal.

    Requires write access (validate_item_access raises 401/404 otherwise).
    Deletes the parent Item row first, then the model-specific row.

    """

    await validate_item_access(
        query_filter=query_filter,
        model_name=model_name,
        username=username,
        write_access=True
    )

    database_model_name = get_database_model_name(model_name)
    database_model_serializer = get_database_serializer_name(model_name)

    table_metadata = await database_model_serializer.from_queryset_single(
        database_model_name.get(query_filter)
    )

    # NOTE(review): other helpers in this file reach the parent Item via
    # `.item.portal_id`; here it is `.portal_id.portal_id` — confirm both
    # serializer shapes are intended.
    item_metadata = await db_models.Item_Pydantic.from_queryset_single(
        db_models.Item.get(portal_id=table_metadata.portal_id.portal_id)
    )

    await db_models.Item.filter(portal_id=item_metadata.portal_id).delete()

    await database_model_name.filter(query_filter).delete()
341 |
async def update_read_and_write_access_list(
    portal_id: str,
    read_access_list: list,
    write_access_list: list
):
    """
    Replace the read and write access lists for an item.

    Existing entries for the item are removed, then one row is created per
    name in each supplied list.
    """

    access_pairs = (
        (db_models.ItemReadAccessList, read_access_list),
        (db_models.ItemWriteAccessList, write_access_list),
    )

    for access_model, names in access_pairs:
        await access_model.filter(portal_id=portal_id).delete()
        for name in names:
            await access_model.create(name=name, portal_id_id=portal_id)
359 |
async def get_token_header(
    token: str=Depends(oauth2_scheme)
) -> str:
    """
    Method to return username via JWT token.

    Decodes the bearer token with the application secret (HS256) and
    returns its 'username' claim. Raises HTTPException 401 for expired,
    tampered, or malformed tokens.

    """

    try:
        user = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
    except (ExpiredSignatureError, InvalidSignatureError, DecodeError) as exc:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=f"JWT Error: {str(exc)}"
        ) from exc
    return user['username']
376 |
async def get_user_groups(
    username: str
) -> list:
    """
    Method to return a list of groups a user has access to within the portal.

    The returned list always starts with the username itself, followed by
    the names of groups the user belongs to.
    """

    groups_plus_username = [username]

    # Prefetch filters each group's group_users down to this username, so
    # membership can be checked without extra queries.
    groups = (
        await db_models.Group.all()
        .prefetch_related(Prefetch(
            "group_users", queryset=db_models.GroupUser.filter(username=username)
        ))
    )

    for group in groups:
        # Bug fix: only include groups the user is actually a member of.
        # Previously every group name was appended regardless of
        # membership, effectively granting all users access to items
        # shared with any group.
        if group.group_users:
            groups_plus_username.append(group.name)

    return groups_plus_username
398 |
async def authenticate_user(
    username: str,
    password: str
) -> object:
    """
    Method to validate a user via their username and password and return
    the user's information.

    Raises HTTPException 401 with a deliberately generic message when the
    username is unknown or the password fails verification.

    """

    try:
        user = await db_models.User.get(username=username)
    except tortoise.exceptions.DoesNotExist as exc:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail='Invalid username or password.'
        ) from exc
    # NOTE(review): likely unreachable — User.get either returns a record
    # or raises DoesNotExist (handled above); kept as defensive code.
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail='Invalid username or password.'
        )
    if not user.verify_password(password):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail='Invalid username or password.'
        )
    return user
427 |
async def get_tile(
    table_id: str,
    tile_matrix_set_id: str,
    z: int,
    x: int,
    y: int,
    fields: str,
    cql_filter: str,
    app: FastAPI
) -> bytes:
    """
    Method to return vector tile from database.

    Returns a (tile_bytes, from_cache) tuple: ('', True) when the tile is
    already cached on disk (the caller is expected to serve the cached
    file), otherwise (mvt_bytes, False). Canonical tiles (all fields, no
    filter) are written to the disk cache when caching is enabled.

    """

    cache_file = f'{os.getcwd()}/cache/user_data_{table_id}/{tile_matrix_set_id}/{z}/{x}/{y}'

    if os.path.exists(cache_file):
        # Cache hit: signal the caller to serve the cached file.
        return '', True

    pool = app.state.database

    async with pool.acquire() as con:


        # Collect the table's non-geometry columns; also used as the
        # pygeofilter field mapping when translating the CQL filter.
        sql_field_query = f"""
        SELECT column_name
        FROM information_schema.columns
        WHERE table_name = '{table_id}'
        AND column_name != 'geom';
        """

        field_mapping = {}

        db_fields = await con.fetch(sql_field_query)

        for field in db_fields:
            field_mapping[field['column_name']] = field['column_name']

        if fields is None:
            # No explicit field selection: include every column.
            field_list = ""

            for field in db_fields:
                column = field['column_name']
                field_list += f', "{column}"'
        else:
            field_list = f',"{fields}"'

        # Build the MVT for the requested XYZ tile envelope.
        # NOTE(review): table_id/fields are interpolated directly into the
        # SQL — presumably validated upstream; confirm they cannot carry
        # untrusted input.
        sql_vector_query = f"""
        SELECT ST_AsMVT(tile, 'user_data.{table_id}', 4096)
        FROM (
            WITH
            bounds AS (
                SELECT ST_TileEnvelope({z}, {x}, {y}) as geom
            )
            SELECT
                ST_AsMVTGeom(
                    ST_Transform("table".geom, 3857)
                    ,bounds.geom
                ) AS mvtgeom {field_list}
            FROM user_data.{table_id} as "table", bounds
            WHERE ST_Intersects(
                ST_Transform("table".geom, 4326),
                ST_Transform(bounds.geom, 4326)
            )

        """
        if cql_filter:
            # Translate the CQL filter into a SQL predicate.
            ast = parse(cql_filter)
            where_statement = to_sql_where(ast, field_mapping)
            sql_vector_query += f" AND {where_statement}"

        sql_vector_query += f"LIMIT {config.MAX_FEATURES_PER_TILE}) as tile"

        tile = await con.fetchval(sql_vector_query)

        # Only cache the canonical tile (no field subset, no filter), and
        # only when caching is enabled via configuration.
        if fields is None and cql_filter is None and config.CACHE_AGE_IN_SECONDS > 0:

            cache_file_dir = f'{os.getcwd()}/cache/user_data_{table_id}/{tile_matrix_set_id}/{z}/{x}'

            if not os.path.exists(cache_file_dir):
                try:
                    os.makedirs(cache_file_dir)
                except OSError:
                    # A concurrent request may have created it already.
                    pass

            with open(cache_file, "wb") as file:
                file.write(tile)
                file.close()

        return tile, False
519 |
async def get_table_geometry_type(
    table_id: str,
    app: FastAPI
) -> str:
    """
    Method used to retrieve the geometry type for a given table.

    Returns 'point', 'line', or 'polygon' based on the first row's
    geometry, or 'unknown' when the table does not exist or is empty.
    (Return annotation corrected from list to str — the function has
    always returned a string.)
    """

    pool = app.state.database

    async with pool.acquire() as con:
        # LIMIT 1: only the first row's type is consulted, so avoid
        # evaluating ST_GeometryType over the whole table.
        geometry_query = f"""
        SELECT ST_GeometryType(geom) as geom_type
        FROM user_data.{table_id}
        LIMIT 1
        """
        try:
            geometry_type = await con.fetchval(geometry_query)
        except asyncpg.exceptions.UndefinedTableError:
            return "unknown"

    if geometry_type is None:
        return "unknown"

    # PostGIS reports e.g. 'ST_Point', 'ST_MultiPolygon', 'ST_LineString';
    # collapse those into the three categories the UI understands.
    geom_type = 'point'

    if 'Polygon' in geometry_type:
        geom_type = 'polygon'
    elif 'Line' in geometry_type:
        geom_type = 'line'

    return geom_type
553 |
async def get_table_center(
    table_id: str,
    app: FastAPI
) -> list:
    """
    Method used to retrieve the table center for a given table.

    Returns [x, y]: the centroid of the union of all geometries in the
    table, in the table's stored SRID.

    """

    pool = app.state.database

    async with pool.acquire() as con:
        query = f"""
        SELECT ST_X(ST_Centroid(ST_Union(geom))) as x,
        ST_Y(ST_Centroid(ST_Union(geom))) as y
        FROM user_data.{table_id}
        """
        center = await con.fetch(query)

    # Single-row result: index 0 is x, index 1 is y.
    return [center[0][0],center[0][1]]
574 |
async def generate_where_clause(
    info: object,
    con,
    no_where: bool=False
) -> str:
    """
    Method to generate where clause.

    Builds a SQL WHERE/AND fragment from an analysis request:
    - info.filter: a CQL filter translated to SQL via pygeofilter, using
      the table's real column names as the field mapping.
    - info.coordinates / info.geometry_type / info.spatial_relationship:
      an optional spatial predicate against the table geometry.
    When no_where is True the fragment starts with AND (for appending to
    an existing WHERE clause); otherwise it starts with WHERE.

    """

    query = ""

    if info.filter:
        # Fetch the table's non-geometry columns for the field mapping.
        sql_field_query = f"""
        SELECT column_name
        FROM information_schema.columns
        WHERE table_name = '{info.table}'
        AND column_name != 'geom';
        """

        db_fields = await con.fetch(sql_field_query)

        field_mapping = {}

        for field in db_fields:
            field_mapping[field['column_name']] = field['column_name']

        ast = parse(info.filter)
        filter = to_sql_where(ast, field_mapping)

        if no_where is False:
            query += " WHERE "
        else:
            query += " AND "
        query += f" {filter}"

    if info.coordinates and info.geometry_type and info.spatial_relationship:
        # Join to an existing CQL predicate with AND; otherwise open the
        # clause (unless the caller already wrote WHERE, i.e. no_where).
        if info.filter:
            query += " AND "
        else:
            if no_where is False:
                query += " WHERE "
        # POLYGON WKT requires double parentheses around its ring.
        if info.geometry_type == 'POLYGON':
            query += f"{info.spatial_relationship}(ST_GeomFromText('{info.geometry_type}(({info.coordinates}))',4326) ,{info.table}.geom)"
        else:
            query += f"{info.spatial_relationship}(ST_GeomFromText('{info.geometry_type}({info.coordinates})',4326) ,{info.table}.geom)"

    return query
623 |
def get_new_table_id() -> str:
    """
    Generate a random table identifier.

    Returns a 50-character string of lowercase ASCII letters.
    """

    return ''.join(random.choices(string.ascii_lowercase, k=50))
632 |
def get_new_process_id() -> str:
    """
    Generate a new process identifier.

    Returns a random UUID4 rendered as its canonical string form.
    """

    return f"{uuid.uuid4()}"
640 |
def remove_bad_characters(
    string_of_characters: str
) -> str:
    """
    Sanitize a string for safe use as an identifier.

    Strips surrounding whitespace, replaces every character outside
    [a-zA-Z0-9_] with an underscore, and lowercases the result.
    """

    cleaned = string_of_characters.strip()
    return re.sub(r'[^a-zA-Z0-9_]', '_', cleaned).lower()
652 |
async def get_table_columns(
    table_id: str,
    app: FastAPI,
    new_table_name: str=None
) -> list:
    """
    Return the non-geometry column names of a table.

    When new_table_name is provided, each name is qualified as
    "<new_table_name>.<column_name>" for use in SQL statements.
    """

    pool = app.state.database

    async with pool.acquire() as con:


        sql_field_query = f"""
        SELECT column_name
        FROM information_schema.columns
        WHERE table_name = '{table_id}'
        AND column_name != 'geom';
        """

        db_fields = await con.fetch(sql_field_query)

        if new_table_name:
            return [f"{new_table_name}.{record['column_name']}" for record in db_fields]

        return [record['column_name'] for record in db_fields]
687 |
async def get_table_geojson(
    table_id: str,
    app: FastAPI,
    filter: str=None,
    bbox: str=None,
    limit: int=200000,
    offset: int=0,
    properties: str="*",
    sortby: str="gid",
    sortdesc: int=1,
    srid: int=4326,
    return_geometry: bool=True
) -> object:
    """
    Method used to retrieve the table geojson.

    Builds and runs a query against user_data.{table_id} and returns a
    GeoJSON FeatureCollection dict with numberMatched / numberReturned.

    Parameters:
    - filter: optional SQL WHERE fragment; None or "" means no filter.
    - bbox: optional "xmin,ymin,xmax,ymax" intersection filter (EPSG:4326).
    - limit/offset: pagination of the feature set.
    - properties: comma-separated column list, "*" for all, "" for none.
    - sortby/sortdesc: ORDER BY column and direction (sortdesc != 1 => desc).
    - srid: output SRID for geometries.
    - return_geometry: when False, features carry properties only.

    Raises HTTPException 400 on invalid filter values or functions.
    """

    pool = app.state.database

    async with pool.acquire() as con:
        if return_geometry:
            # Wrap row output into a FeatureCollection in SQL.
            query = """
            SELECT
            json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(ST_AsGeoJSON(t.*)::json)
            )
            FROM (
            """

            if properties != '*' and properties != "":
                query += f"SELECT {properties},ST_Transform(geom,{srid})"
            else:
                query += f"SELECT ST_Transform(geom,{srid}), gid"

        else:
            if properties != '*' and properties != "":
                query = f"SELECT {properties}, gid"
            else:
                query = f"SELECT gid"

        query += f" FROM user_data.{table_id} "

        count_query = f"""SELECT COUNT(*) FROM user_data.{table_id} """

        # Bug fix: the previous `filter != ""` check let the default
        # filter=None through and generated invalid SQL ("WHERE None").
        # Treat None and "" both as "no filter".
        has_filter = bool(filter)

        if has_filter:
            query += f"WHERE {filter}"
            count_query += f"WHERE {filter}"

        if bbox is not None:
            if has_filter:
                query += " AND "
                count_query += " AND "
            else:
                query += " WHERE "
                count_query += " WHERE "
            coords = bbox.split(',')
            query += f" ST_INTERSECTS(geom,ST_MakeEnvelope({coords[0]}, {coords[1]}, {coords[2]}, {coords[3]}, 4326)) "
            count_query += f" ST_INTERSECTS(geom,ST_MakeEnvelope({coords[0]}, {coords[1]}, {coords[2]}, {coords[3]}, 4326)) "

        if sortby != "gid":
            sort = "asc"
            if sortdesc != 1:
                sort = "desc"
            query += f" ORDER BY {sortby} {sort}"

        query += f" OFFSET {offset} LIMIT {limit}"

        if return_geometry:

            query += ") AS t;"

        try:
            if return_geometry:
                geojson = await con.fetchrow(query)
            else:
                featuresJson = await con.fetch(query)
        except asyncpg.exceptions.InvalidTextRepresentationError as error:
            raise HTTPException(
                status_code=400,
                detail=str(error)
            ) from error
        except asyncpg.exceptions.UndefinedFunctionError as error:
            raise HTTPException(
                status_code=400,
                detail=str(error)
            ) from error
        count = await con.fetchrow(count_query)

        if return_geometry:

            formatted_geojson = json.loads(geojson['json_build_object'])

            if formatted_geojson['features'] is not None:
                for feature in formatted_geojson['features']:
                    # Drop helper columns that leak into properties.
                    if 'st_transform' in feature['properties']:
                        del feature['properties']['st_transform']
                    if 'geom' in feature['properties']:
                        del feature['properties']['geom']
                    feature['id'] = feature['properties']['gid']
                    if properties == "":
                        feature['properties'].pop("gid")
        else:

            # Build the FeatureCollection by hand from property-only rows.
            formatted_geojson = {
                "type": "FeatureCollection",
                "features": []
            }

            for feature in featuresJson:
                geojsonFeature = {
                    "type": "Feature",
                    "geometry": None,
                    "properties": {},
                    "id": feature['gid']
                }
                featureProperties = dict(feature)
                for property in featureProperties:
                    if property not in ['geom', 'st_transform']:
                        geojsonFeature['properties'][property] = featureProperties[property]
                if properties == "":
                    geojsonFeature['properties'].pop("gid")
                formatted_geojson['features'].append(geojsonFeature)

        formatted_geojson['numberMatched'] = count['count']
        formatted_geojson['numberReturned'] = 0
        if formatted_geojson['features'] is not None:
            formatted_geojson['numberReturned'] = len(formatted_geojson['features'])

        return formatted_geojson
819 |
async def get_table_bounds(
    table_id: str,
    app: FastAPI
) -> list:
    """
    Method used to retrieve the bounds for a given table.

    Returns [xmin, ymin, xmax, ymax] parsed from ST_Extent's
    'BOX(xmin ymin,xmax ymax)' text output, or [] when the table is
    missing or contains no geometries.

    """

    pool = app.state.database

    async with pool.acquire() as con:

        query = f"""
        SELECT ST_Extent(geom)
        FROM user_data.{table_id}
        """

        table_extent = []

        try:
            extent = await con.fetchval(query)
        except asyncpg.exceptions.UndefinedTableError:
            return []

        if extent is None:
            return []

        # Strip the BOX( ... ) wrapper, leaving "xmin ymin,xmax ymax".
        extent = extent.replace('BOX(','').replace(')','')

        # Each comma-separated corner is "x y"; append both coordinates.
        for corner in extent.split(','):
            table_extent.append(float(corner.split(' ')[0]))
            table_extent.append(float(corner.split(' ')[1]))

        return table_extent
855 |
def delete_user_tile_cache(
    table_id: str
) -> None:
    """
    Remove the on-disk tile cache directory for a user's table.

    Does nothing when no cache directory exists for the table.
    """

    cache_path = f'{os.getcwd()}/cache/user_data_{table_id}'

    if os.path.exists(cache_path):
        shutil.rmtree(cache_path)
866 |
def check_if_username_in_access_list(
    username: str,
    access_list: list,
    type: str
):
    """
    Ensure a username appears in an item's access list.

    `type` is "read" or "write" and is only used to build the error
    message. Returns None when the username is present; raises
    HTTPException 400 otherwise.
    """

    if username in access_list:
        return

    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=f'{username} is not in {type}_access_list, add {username} to {type}_access_list.'
    )
877 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aerich==0.6.3
2 | aiofiles==0.8.0
3 | aiohttp==3.8.5
4 | aiosignal==1.2.0
5 | aiosqlite==0.17.0
6 | anyio==3.6.1
7 | async-timeout==4.0.2
8 | asyncpg==0.26.0
9 | attrs==22.1.0
10 | Authlib==1.0.1
11 | beautifulsoup4==4.11.1
12 | bcrypt==4.0.0
13 | cachetools==5.2.0
14 | certifi==2023.7.22
15 | cffi==1.15.1
16 | charset-normalizer==2.1.1
17 | click==8.1.3
18 | cryptography==41.0.3
19 | dateparser==1.1.1
20 | dictdiffer==0.9.0
21 | fastapi==0.80.0
22 | frozenlist==1.3.1
23 | google-api-core==2.10.0
24 | google-api-python-client==2.58.0
25 | google-auth==2.11.0
26 | google-auth-httplib2==0.1.0
27 | googleapis-common-protos==1.56.4
28 | h11==0.12.0
29 | httpcore==0.15.0
30 | httpx==0.23.0
31 | idna==3.3
32 | iso8601==1.0.2
33 | lark==0.12.0
34 | multidict==6.0.2
35 | numpy==1.23.2
36 | pandas==1.4.3
37 | passlib==1.7.4
38 | prometheus-client==0.14.1
39 | prometheus-fastapi-instrumentator==5.9.1
40 | protobuf==4.21.6
41 | pyasn1==0.4.8
42 | pyasn1-modules==0.2.8
43 | pycparser==2.21
44 | pydantic==1.9.2
45 | pygeofilter==0.1.2
46 | pygeoif==0.7
47 | PyJWT==2.4.0
48 | pyparsing==3.0.9
49 | pypika-tortoise==0.1.6
50 | python-dateutil==2.8.2
51 | python-dotenv==0.20.0
52 | python-multipart==0.0.5
53 | pytz==2022.2.1
54 | pytz-deprecation-shim==0.1.0.post0
55 | regex==2022.3.2
56 | requests==2.31.0
57 | rfc3986==1.5.0
58 | rsa==4.9
59 | Shapely==1.8.4
60 | six==1.16.0
61 | sniffio==1.2.0
62 | soupsieve==2.3.2.post1
63 | starlette==0.27.0
64 | tomlkit==0.11.4
65 | tortoise-orm==0.19.2
66 | typing-extensions==4.3.0
67 | tzdata==2022.2
68 | tzlocal==4.2
69 | uritemplate==4.1.1
70 | urllib3==1.26.12
71 | uvicorn==0.18.3
72 | yarl==1.8.1
73 |
--------------------------------------------------------------------------------