├── .flake8
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       └── lint.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CONTRIBUTING.md
├── License.txt
├── README.md
├── environment.yml
├── notebooks
│   └── examples
│       ├── Basic Track Analysis - Pyspark.ipynb
│       ├── Basic Track Analysis.ipynb
│       ├── Create Track Lines From Points.ipynb
│       ├── Create an Aggregated Map Service.ipynb
│       ├── Find Dwell Times at Polygons.ipynb
│       ├── Identify Inspected Buildings.ipynb
│       ├── Location Tracking Status.ipynb
│       ├── Proximity Tracing.ipynb
│       ├── Quickstart Guide.ipynb
│       ├── Visualize Route Deviance.ipynb
│       └── users.csv
├── readmes
│   ├── check_edit_location.md
│   ├── export_tracks.md
│   ├── generate_users_arcade_expression.md
│   ├── mirror_lkl_layer.md
│   └── polygon_cleanup_tracks.md
└── scripts
    ├── check_edit_location.py
    ├── export_tracks.py
    ├── generate_users_arcade_expression.py
    ├── mirror_lkl_layer.py
    └── polygon_cleanup_tracks.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 160
3 | max-complexity = 18
4 | ignore = E722, W503, W504, F811
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: Lint
2 | on: [pull_request]
3 |
4 | jobs:
5 | lint:
6 | runs-on: ubuntu-latest
7 | steps:
8 | - uses: actions/checkout@v2
9 | - name: Set up Python 3.7
10 | uses: actions/setup-python@v1
11 | with:
12 | python-version: 3.7
13 | - name: Setup flake8 annotations
14 | uses: rbialon/flake8-annotations@v1
15 | - name: Lint with flake8
16 | run: |
17 | pip install flake8
18 | flake8 . --count --show-source --statistics
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | venv/
83 | ENV/
84 |
85 | # Spyder project settings
86 | .spyderproject
87 |
88 | # Rope project settings
89 | .ropeproject
90 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://gitlab.com/pycqa/flake8
3 | rev: 3.8.3
4 | hooks:
5 | - id: flake8
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing).
--------------------------------------------------------------------------------
/License.txt:
--------------------------------------------------------------------------------
1 | Apache License - 2.0
2 |
3 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
4 |
5 | 1. Definitions.
6 |
7 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
8 |
9 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
10 |
11 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control
12 | with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management
13 | of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial
14 | ownership of such entity.
15 |
16 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
17 |
18 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source,
19 | and configuration files.
20 |
21 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to
22 | compiled object code, generated documentation, and conversions to other media types.
23 |
24 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice
25 | that is included in or attached to the work (an example is provided in the Appendix below).
26 |
27 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the
28 | editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes
29 | of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of,
30 | the Work and Derivative Works thereof.
31 |
32 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work
33 | or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual
34 | or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of
35 | electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on
36 | electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for
37 | the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing
38 | by the copyright owner as "Not a Contribution."
39 |
40 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and
41 | subsequently incorporated within the Work.
42 |
43 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual,
44 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display,
45 | publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
46 |
47 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide,
48 | non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell,
49 | sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are
50 | necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was
51 | submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work
52 | or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You
53 | under this License for that Work shall terminate as of the date such litigation is filed.
54 |
55 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications,
56 | and in Source or Object form, provided that You meet the following conditions:
57 |
58 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and
59 |
60 | 2. You must cause any modified files to carry prominent notices stating that You changed the files; and
61 |
62 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices
63 | from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
64 |
65 | 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a
66 | readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the
67 | Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the
68 | Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever
69 | such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License.
70 | You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work,
71 | provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to
72 | Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your
73 | modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with
74 | the conditions stated in this License.
75 |
76 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You
77 | to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above,
78 | nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
79 |
80 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except
81 | as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
82 |
83 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides
84 | its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation,
85 | any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for
86 | determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under
87 | this License.
88 |
89 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required
90 | by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages,
91 | including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the
92 | use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or
93 | any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
94 |
95 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a
96 | fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting
97 | such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree
98 | to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your
99 | accepting any such warranty or additional liability.
100 |
101 | END OF TERMS AND CONDITIONS
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Tracker Scripts
2 | A set of Python scripts and notebooks to help administer track views and analyze track data
3 |
4 | ### Features
5 |
6 | Several example Jupyter notebooks and scripts are provided to demonstrate some more advanced workflows that are possible via the ArcGIS API for Python and Tracker.
7 |
8 | Notebooks:
9 | - [Quickstart Guide](notebooks/examples/Quickstart%20Guide.ipynb)
10 | - [Basic Track Analysis](notebooks/examples/Basic%20Track%20Analysis.ipynb)
11 | - [Basic Track Analysis - PySpark](notebooks/examples/Basic%20Track%20Analysis%20-%20Pyspark.ipynb)
12 | - [Location Tracking Status](notebooks/examples/Location%20Tracking%20Status.ipynb)
13 | - [Creating Track Lines](notebooks/examples/Create%20Track%20Lines%20From%20Points.ipynb)
14 | - [Visualize Route Deviance](notebooks/examples/Visualize%20Route%20Deviance.ipynb)
15 | - [Identify Inspected Buildings](notebooks/examples/Identify%20Inspected%20Buildings.ipynb)
16 | - [Find Dwell Times at Polygons](notebooks/examples/Find%20Dwell%20Times%20at%20Polygons.ipynb)
17 | - [Proximity Tracing](notebooks/examples/Proximity%20Tracing.ipynb)
18 | - [Create an Aggregated Map Service](notebooks/examples/Create%20an%20Aggregated%20Map%20Service.ipynb)
19 |
20 | In addition, we have uploaded our AGOL-compatible notebooks into a publicly accessible [group of Hosted Notebooks in ArcGIS Online](https://www.arcgis.com/home/group.html?id=0bfc8729753f419b82365200fc09b076#overview)
21 |
22 | Scripts:
23 | - [Check Edit Location](scripts/check_edit_location.py) - [README here](readmes/check_edit_location.md)
24 | - [Mirror LKL Layer](scripts/mirror_lkl_layer.py) - [README here](readmes/mirror_lkl_layer.md)
25 | - [Polygon Cleanup Tracks](scripts/polygon_cleanup_tracks.py) - [README here](readmes/polygon_cleanup_tracks.md)
26 | - [Generate Users Arcade Expression](scripts/generate_users_arcade_expression.py) - [README here](readmes/generate_users_arcade_expression.md)
27 | - [Export Tracks From AGOL](scripts/export_tracks.py) - [README here](readmes/export_tracks.md)
28 |
29 |
30 | ### Instructions
31 |
32 | To run locally:
33 | 1. Install [Anaconda](https://www.anaconda.com/distribution)
34 | 2. Run `conda env create --file environment.yml` to create the virtual environment with the correct dependencies
35 | 3. Run `conda activate tracker-scripts` to activate the environment
36 | 4. Start the Jupyter server using `jupyter notebook` or run the Python script from the command line
37 | 5. Open the notebook, modify it, and then run it
38 | 6. (Optional - dev only) Configure pre-commit to run flake8 linting on pushes
39 | * `pre-commit install --hook-type pre-push`
40 |
41 | To run in ArcGIS Notebooks:
42 | 1. Visit our [AGOL Hosted Notebooks group](https://www.arcgis.com/home/group.html?id=0bfc8729753f419b82365200fc09b076#overview)
43 | 2. Click on "Content"
44 | 3. Choose a notebook you'd like to run
45 | 4. Click on the "Open Notebook" thumbnail to open the notebook in ArcGIS Notebooks
46 |
47 | ### Requirements
48 | - [Anaconda](https://www.anaconda.com/distribution) must be installed
49 | - A web browser capable of running Jupyter notebooks
50 |
51 | ## Resources
52 |
53 | * [ArcGIS API for Python](https://developers.arcgis.com/python)
54 | * [Tracker for ArcGIS](https://www.esri.com/en-us/arcgis/products/tracker-for-arcgis/overview)
55 |
56 | ## Issues
57 |
58 | Although we do our best to ensure these scripts and notebooks work as expected, they are provided as is and there is no official support.
59 |
60 | If you find a bug, please let us know by submitting an issue.
61 |
62 | ## Contributing
63 |
64 | Esri welcomes contributions from anyone and everyone.
65 | Please see our [guidelines for contributing](https://github.com/esri/contributing).
66 |
67 | ## Licensing
68 |
69 | Copyright 2020 Esri
70 |
71 | Licensed under the Apache License, Version 2.0 (the "License");
72 | you may not use this file except in compliance with the License.
73 | You may obtain a copy of the License at
74 |
75 | http://www.apache.org/licenses/LICENSE-2.0
76 |
77 | Unless required by applicable law or agreed to in writing, software
78 | distributed under the License is distributed on an "AS IS" BASIS,
79 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
80 | See the License for the specific language governing permissions and
81 | limitations under the License.
82 |
83 | A copy of the license is available in the repository's
84 | [LICENSE](License.txt) file.
85 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: tracker-scripts
2 | channels:
3 | - esri
4 | - conda-forge
5 | dependencies:
6 | - arcgis>=1.8.3
7 | - pendulum=1.4.4
8 | - pip>=19.1.1
9 | - python=3.6.8
10 | - seaborn=0.9.0
11 | - shapely=1.6.4
12 | - flake8>=3.8.3
13 | - pre-commit>=2.7.1
14 | - pip:
15 | - pyqrcode==1.2.1
16 | - pypng==0.0.18
17 |
--------------------------------------------------------------------------------
/notebooks/examples/Location Tracking Status.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "## Location Tracking Status\n",
8 | "A guide showing how to quickly view some details of the location tracking capability in your organization\n",
9 | "\n",
10 | "This guide assumes that:\n",
11 | "1. You are an administrator for your organization\n",
12 | "2. You are using Enterprise 10.7+\n",
13 | "3. You are running this with the latest version of the ArcGIS API for Python"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": 1,
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "from arcgis.gis import GIS\n",
23 | "\n",
24 | "gis = GIS(\"https://server.domain.com/webadapter\", \"admin\", \"password\", verfiy_cert=False)\n",
25 | "lt = gis.admin.location_tracking"
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | "### Check Current Status of Location Tracking\n",
33 | "Let's quickly check to see the status of location tracking for our organization"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": 2,
39 | "metadata": {},
40 | "outputs": [
41 | {
42 | "name": "stdout",
43 | "output_type": "stream",
44 | "text": [
45 | "Status: enabled\n",
46 | "Retention Period: 30 DAYS (disabled)\n"
47 | ]
48 | }
49 | ],
50 | "source": [
51 | "print(f\"Status: {lt.status}\")\n",
52 | "print(f\"Retention Period: {lt.retention_period} {lt.retention_period_units} ({'enabled' if lt.retention_period_enabled else 'disabled'})\")"
53 | ]
54 | },
55 | {
56 | "cell_type": "markdown",
57 | "metadata": {},
58 | "source": [
59 | "### Check Licenses\n",
60 | "Let's see how many licenses have been assigned"
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "execution_count": 3,
66 | "metadata": {},
67 | "outputs": [
68 | {
69 | "data": {
70 | "text/html": [
71 | "
\n",
72 | "\n",
85 | "
\n",
86 | " \n",
87 | " \n",
88 | " | \n",
89 | " Entitlement | \n",
90 | " Total | \n",
91 | " Assigned | \n",
92 | " Remaining | \n",
93 | "
\n",
94 | " \n",
95 | " \n",
96 | " \n",
97 | " 0 | \n",
98 | " tracker | \n",
99 | " 200 | \n",
100 | " 129 | \n",
101 | " 71 | \n",
102 | "
\n",
103 | " \n",
104 | "
\n",
105 | "
"
106 | ],
107 | "text/plain": [
108 | " Entitlement Total Assigned Remaining\n",
109 | "0 tracker 200 129 71"
110 | ]
111 | },
112 | "execution_count": 3,
113 | "metadata": {},
114 | "output_type": "execute_result"
115 | }
116 | ],
117 | "source": [
118 | "gis.admin.license.get('Tracker for ArcGIS').report"
119 | ]
120 | },
121 | {
122 | "cell_type": "markdown",
123 | "metadata": {},
124 | "source": [
125 | "### Check Active Users\n",
126 | "Let's see how many people have actually recorded tracks"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": 4,
132 | "metadata": {},
133 | "outputs": [
134 | {
135 | "name": "stdout",
136 | "output_type": "stream",
137 | "text": [
138 | "Users: 148\n"
139 | ]
140 | }
141 | ],
142 | "source": [
143 | "users = lt.tracks_layer.query(group_by_fields_for_statistics=\"created_user\", \n",
144 | " out_statistics=[{\"statisticType\": \"count\", \"onStatisticField\": \"objectid\", \"outStatisticFieldName\": \"count\"}],\n",
145 | " order_by=\"count\",\n",
146 | " as_df=True)\n",
147 | "print(f\"Users: {len(users)}\")"
148 | ]
149 | },
150 | {
151 | "cell_type": "markdown",
152 | "metadata": {},
153 | "source": [
154 | "Let's see who the top 5 users are (based on how many tracks points they have uploaded)"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 5,
160 | "metadata": {},
161 | "outputs": [
162 | {
163 | "data": {
164 | "text/html": [
165 | "\n",
166 | "\n",
179 | "
\n",
180 | " \n",
181 | " \n",
182 | " | \n",
183 | " count | \n",
184 | " created_user | \n",
185 | "
\n",
186 | " \n",
187 | " \n",
188 | " \n",
189 | " 0 | \n",
190 | " 536228 | \n",
191 | " apulver | \n",
192 | "
\n",
193 | " \n",
194 | " 1 | \n",
195 | " 424272 | \n",
196 | " cgillgrass | \n",
197 | "
\n",
198 | " \n",
199 | " 2 | \n",
200 | " 383522 | \n",
201 | " tmorey | \n",
202 | "
\n",
203 | " \n",
204 | " 3 | \n",
205 | " 267023 | \n",
206 | " jshaner | \n",
207 | "
\n",
208 | " \n",
209 | " 4 | \n",
210 | " 247850 | \n",
211 | " jwhitney | \n",
212 | "
\n",
213 | " \n",
214 | "
\n",
215 | "
"
216 | ],
217 | "text/plain": [
218 | " count created_user\n",
219 | "0 536228 apulver\n",
220 | "1 424272 cgillgrass\n",
221 | "2 383522 tmorey\n",
222 | "3 267023 jshaner\n",
223 | "4 247850 jwhitney"
224 | ]
225 | },
226 | "execution_count": 5,
227 | "metadata": {},
228 | "output_type": "execute_result"
229 | }
230 | ],
231 | "source": [
232 | "users.head(5)"
233 | ]
234 | },
235 | {
236 | "cell_type": "markdown",
237 | "metadata": {},
238 | "source": [
239 | "Let's see who the top 5 users were during the last week"
240 | ]
241 | },
242 | {
243 | "cell_type": "code",
244 | "execution_count": 6,
245 | "metadata": {},
246 | "outputs": [
247 | {
248 | "data": {
249 | "text/html": [
250 | "\n",
251 | "\n",
264 | "
\n",
265 | " \n",
266 | " \n",
267 | " | \n",
268 | " count | \n",
269 | " created_user | \n",
270 | "
\n",
271 | " \n",
272 | " \n",
273 | " \n",
274 | " 0 | \n",
275 | " 32894 | \n",
276 | " apulver | \n",
277 | "
\n",
278 | " \n",
279 | " 1 | \n",
280 | " 19445 | \n",
281 | " jshaner | \n",
282 | "
\n",
283 | " \n",
284 | " 2 | \n",
285 | " 9470 | \n",
286 | " cgillgrass | \n",
287 | "
\n",
288 | " \n",
289 | " 3 | \n",
290 | " 9244 | \n",
291 | " bszukalski | \n",
292 | "
\n",
293 | " \n",
294 | " 4 | \n",
295 | " 8282 | \n",
296 | " igomez | \n",
297 | "
\n",
298 | " \n",
299 | "
\n",
300 | "
"
301 | ],
302 | "text/plain": [
303 | " count created_user\n",
304 | "0 32894 apulver\n",
305 | "1 19445 jshaner\n",
306 | "2 9470 cgillgrass\n",
307 | "3 9244 bszukalski\n",
308 | "4 8282 igomez"
309 | ]
310 | },
311 | "execution_count": 6,
312 | "metadata": {},
313 | "output_type": "execute_result"
314 | }
315 | ],
316 | "source": [
317 | "from datetime import datetime, timedelta\n",
318 | "d = datetime.utcnow()-timedelta(days=7)\n",
319 | "users = lt.tracks_layer.query(where=f\"location_timestamp >= timestamp '{d.strftime('%Y-%m-%d %H:%M:%S')}'\",\n",
320 | " group_by_fields_for_statistics=\"created_user\", \n",
321 | " out_statistics=[{\"statisticType\": \"count\", \"onStatisticField\": \"objectid\", \"outStatisticFieldName\": \"count\"}],\n",
322 | " order_by=\"count\",\n",
323 | " as_df=True)\n",
324 | "users.head(5)"
325 | ]
326 | },
327 | {
328 | "cell_type": "markdown",
329 | "metadata": {},
330 | "source": [
331 | "### Check total number of tracks\n",
332 | "Let's see how many tracks are available"
333 | ]
334 | },
335 | {
336 | "cell_type": "code",
337 | "execution_count": 7,
338 | "metadata": {},
339 | "outputs": [
340 | {
341 | "name": "stdout",
342 | "output_type": "stream",
343 | "text": [
344 | "Total Tracks: 4,629,564\n"
345 | ]
346 | }
347 | ],
348 | "source": [
349 | "count = lt.tracks_layer.query(return_count_only=True)\n",
350 | "print(f\"Total Tracks: {count:,}\")"
351 | ]
352 | },
353 | {
354 | "cell_type": "markdown",
355 | "metadata": {},
356 | "source": [
357 | "### Check number of track views\n",
358 | "Let's see how many track views are in my organization"
359 | ]
360 | },
361 | {
362 | "cell_type": "code",
363 | "execution_count": 8,
364 | "metadata": {},
365 | "outputs": [
366 | {
367 | "name": "stdout",
368 | "output_type": "stream",
369 | "text": [
370 | "Track Views: 10\n"
371 | ]
372 | }
373 | ],
374 | "source": [
375 | "items = gis.content.search(\"typekeywords:'Location Tracking View'\")\n",
376 | "print(f\"Track Views: {len(items)}\")"
377 | ]
378 | }
379 | ],
380 | "metadata": {
381 | "kernelspec": {
382 | "display_name": "Python 3",
383 | "language": "python",
384 | "name": "python3"
385 | },
386 | "language_info": {
387 | "codemirror_mode": {
388 | "name": "ipython",
389 | "version": 3
390 | },
391 | "file_extension": ".py",
392 | "mimetype": "text/x-python",
393 | "name": "python",
394 | "nbconvert_exporter": "python",
395 | "pygments_lexer": "ipython3",
396 | "version": "3.6.8"
397 | }
398 | },
399 | "nbformat": 4,
400 | "nbformat_minor": 2
401 | }
402 |
--------------------------------------------------------------------------------
/notebooks/examples/Quickstart Guide.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Quickstart Guide\n",
8 | "\n",
9 | "This guide will demonstrates how to quickly:\n",
10 | "- Add users from a CSV file to your organization\n",
11 | "- Assign each user a Tracker for ArcGIS License\n",
12 | "- Create a track view that includes mobile users and track viewers\n",
13 | "- Generate a QR Code for quick sign in on the Android and iOS apps"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": 1,
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "from arcgis.gis import GIS\n",
23 | "from arcgis.apps.tracker import TrackView\n",
24 | "from arcgis.apps import build_tracker_url\n",
25 | "import csv\n",
26 | "import pandas as pd\n",
27 | "import pyqrcode\n",
28 | "\n",
29 | "admin_username = 'admin'\n",
30 | "admin_password = 'password'\n",
31 | "org = 'https://server.domain.com/webadapter'\n",
32 | "users_csv = 'users.csv'\n",
33 | "track_view_name = \"Track View 1\"\n",
34 | "\n",
35 | "gis = GIS(org, admin_username, admin_password, verify_cert=False)"
36 | ]
37 | },
38 | {
39 | "cell_type": "markdown",
40 | "metadata": {},
41 | "source": [
42 | "First we'll read the CSV file using pandas"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 2,
48 | "metadata": {},
49 | "outputs": [
50 | {
51 | "data": {
52 | "text/html": [
53 | "\n",
54 | "\n",
67 | "
\n",
68 | " \n",
69 | " \n",
70 | " | \n",
71 | " Email | \n",
72 | " First Name | \n",
73 | " Last Name | \n",
74 | " Username | \n",
75 | " Password | \n",
76 | " Role | \n",
77 | " User Type | \n",
78 | " Track Viewer | \n",
79 | "
\n",
80 | " \n",
81 | " \n",
82 | " \n",
83 | " 0 | \n",
84 | " myemail@gmail.com | \n",
85 | " Jane | \n",
86 | " Doe | \n",
87 | " janedoe | \n",
88 | " password1 | \n",
89 | " Viewer | \n",
90 | " Viewer | \n",
91 | " No | \n",
92 | "
\n",
93 | " \n",
94 | " 1 | \n",
95 | " myemail2@gmail.com | \n",
96 | " John | \n",
97 | " Doe | \n",
98 | " johndoe | \n",
99 | " password1 | \n",
100 | " Viewer | \n",
101 | " Viewer | \n",
102 | " Yes | \n",
103 | "
\n",
104 | " \n",
105 | " 2 | \n",
106 | " myemail3@gmail.com | \n",
107 | " Bill | \n",
108 | " Doe | \n",
109 | " billdoe | \n",
110 | " password1 | \n",
111 | " Viewer | \n",
112 | " Viewer | \n",
113 | " Yes | \n",
114 | "
\n",
115 | " \n",
116 | "
\n",
117 | "
"
118 | ],
119 | "text/plain": [
120 | " Email First Name Last Name Username Password Role \\\n",
121 | "0 myemail@gmail.com Jane Doe janedoe password1 Viewer \n",
122 | "1 myemail2@gmail.com John Doe johndoe password1 Viewer \n",
123 | "2 myemail3@gmail.com Bill Doe billdoe password1 Viewer \n",
124 | "\n",
125 | " User Type Track Viewer \n",
126 | "0 Viewer No \n",
127 | "1 Viewer Yes \n",
128 | "2 Viewer Yes "
129 | ]
130 | },
131 | "execution_count": 2,
132 | "metadata": {},
133 | "output_type": "execute_result"
134 | }
135 | ],
136 | "source": [
137 | "df = pd.read_csv(users_csv)\n",
138 | "df"
139 | ]
140 | },
141 | {
142 | "cell_type": "markdown",
143 | "metadata": {},
144 | "source": [
145 | "### Create users if necessary\n",
146 | "If the user in the CSV file does not exist in the organization, we'll add them"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": 3,
152 | "metadata": {},
153 | "outputs": [],
154 | "source": [
155 | "users = []\n",
156 | "for index, row in df.iterrows():\n",
157 | " u = gis.users.get(row[\"Username\"])\n",
158 | " if u is None:\n",
159 | " users.append(gis.users.create(\n",
160 | " username=row[\"Username\"], \n",
161 | " password=row[\"Password\"],\n",
162 | " firstname=row[\"First Name\"],\n",
163 | " lastname=row[\"Last Name\"],\n",
164 | " email=row[\"Email\"],\n",
165 | " role=row[\"Role\"],\n",
166 | " user_type=row[\"User Type\"]\n",
167 | " ))\n",
168 | " else:\n",
169 | " users.append(u)"
170 | ]
171 | },
172 | {
173 | "cell_type": "markdown",
174 | "metadata": {},
175 | "source": [
176 | "Each user is then assigned a Tracker for ArcGIS license so that they can use the mobile app"
177 | ]
178 | },
179 | {
180 | "cell_type": "code",
181 | "execution_count": 4,
182 | "metadata": {},
183 | "outputs": [],
184 | "source": [
185 | "tracker_license = gis.admin.license.get('Tracker for ArcGIS')\n",
186 | "for user in users:\n",
187 | " tracker_license.assign(username=user.username, entitlements=[\"tracker\"])"
188 | ]
189 | },
190 | {
191 | "cell_type": "markdown",
192 | "metadata": {},
193 | "source": [
194 | "### Create a new track view and add mobile users"
195 | ]
196 | },
197 | {
198 | "cell_type": "code",
199 | "execution_count": 5,
200 | "metadata": {},
201 | "outputs": [],
202 | "source": [
203 | "track_view = gis.admin.location_tracking.create_track_view(track_view_name)\n",
204 | "track_view.mobile_users.add(users)"
205 | ]
206 | },
207 | {
208 | "cell_type": "markdown",
209 | "metadata": {},
210 | "source": [
211 | "### Create a Track Viewer role if necessary\n",
212 | "In order to view other users tracks, track viewers need to have 2 specific privileges:\n",
213 | "- the ability to join a group\n",
214 | "- the ability to see others users tracks\n",
215 | "\n",
216 | "If a role titled \"Track Viewer\" does not exist, we'll create one."
217 | ]
218 | },
219 | {
220 | "cell_type": "code",
221 | "execution_count": 6,
222 | "metadata": {},
223 | "outputs": [],
224 | "source": [
225 | "for role in gis.users.roles.all():\n",
226 | " if role.name.lower() == \"Track Viewer\".lower():\n",
227 | " track_viewer_role = role\n",
228 | " break\n",
229 | "else:\n",
230 | " track_viewer_role = gis.users.roles.create(\n",
231 | " name='Track Viewer',\n",
232 | " description=\"A user that can use the Track Viewer web app to see others tracks\",\n",
233 | " privileges=[\n",
234 | " \"portal:user:joinGroup\",\n",
235 | " \"portal:user:viewTracks\",\n",
236 | " ]\n",
237 | " )"
238 | ]
239 | },
240 | {
241 | "cell_type": "markdown",
242 | "metadata": {},
243 | "source": [
244 | "### Add Track Viewers\n",
245 | "We'll now add track viewers to the track view based on the \"Track Viewer\" column in the CSV file"
246 | ]
247 | },
248 | {
249 | "cell_type": "code",
250 | "execution_count": 7,
251 | "metadata": {},
252 | "outputs": [],
253 | "source": [
254 | "for index, row in df[df['Track Viewer'] == \"Yes\"].iterrows():\n",
255 | " user = gis.users.get(row[\"Username\"])\n",
256 | " if \"portal:user:joinGroup\" not in user.privileges or \"portal:user:viewTracks\" not in user.privileges:\n",
257 | " user.update_role(track_viewer_role)\n",
258 | " track_view.viewers.add(user)"
259 | ]
260 | },
261 | {
262 | "cell_type": "markdown",
263 | "metadata": {},
264 | "source": [
265 | "### Let's confirm everything is all set"
266 | ]
267 | },
268 | {
269 | "cell_type": "code",
270 | "execution_count": 8,
271 | "metadata": {},
272 | "outputs": [
273 | {
274 | "data": {
275 | "text/plain": [
276 | "['billdoe', 'johndoe', 'admin']"
277 | ]
278 | },
279 | "execution_count": 8,
280 | "metadata": {},
281 | "output_type": "execute_result"
282 | }
283 | ],
284 | "source": [
285 | "track_view.viewers.list()"
286 | ]
287 | },
288 | {
289 | "cell_type": "code",
290 | "execution_count": 9,
291 | "metadata": {},
292 | "outputs": [
293 | {
294 | "data": {
295 | "text/plain": [
296 | "['janedoe', 'johndoe', 'billdoe']"
297 | ]
298 | },
299 | "execution_count": 9,
300 | "metadata": {},
301 | "output_type": "execute_result"
302 | }
303 | ],
304 | "source": [
305 | "track_view.mobile_users.list()"
306 | ]
307 | },
308 | {
309 | "cell_type": "markdown",
310 | "metadata": {},
311 | "source": [
312 | "### Generate a QR code for quick sign in\n",
313 | "Finally, a QR code is generated which can be scanned by a mobile device to quickly sign into the Tracker app."
314 | ]
315 | },
316 | {
317 | "cell_type": "code",
318 | "execution_count": 10,
319 | "metadata": {},
320 | "outputs": [
321 | {
322 | "data": {
323 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAT4AAAE+AQAAAAA8Q0GdAAACp0lEQVR4nO2Zy23EMAxECagAl6TWXZILEMCQM5SsLHzY6w5sBFnFfnuZkMOPzb+7hr3gC77gL4GWV29Xj4/43a64eY44u8fZcNAD49ODOtyOM5A4Nz/zJs4EBMGLqoQ8nWqFNvG9cSSYyumCA88RBZAkhTmbOpjCHCdjPw4jcNyXBZEK/B2B0PIbzrx/yhkRcNne08+jPyqAvJgEDTpFUMD476eCIG3P4XNUCznBQpdPFcEoaHGP4e94Tr9vTAIkhx44I8LSAOD0zsI+ZXNJ8LJx0PlSJNq8wwBGarbMXgq0pBAL+f+vAEEHl6GRB0UwEx19K4Tp9H6kQn6VVU4QRAcHMRAIDAp0cK0UEgTZpTpMLoRhccPUwj+3bk8IhLVPloOpIycgVXqDItiqVU8ZsnnndBKZQT/YC7sQSF/HcysxmPdTKlcEp+cZXL9Soc2FBBtYPXA9pN/XGd0N21hN0M8aTQ5nuzoDpMbTpgjOEEAjU7NpGuGYZ1cEuXsAgrEM8mRxo1rX1uMKgQNtG+UZ97rF1n1JkHvTxl1ade4YT2d5lwTb3cXUjs2W8R9s8QRBNiwYTYoah7OPG/bfAIRAUhCGC5hq4Vnwd7NXAqFHVblhlRBUxbg+lwQz3nMktSmPYcHm8x2CK4JMcW7KsTzujAUukj8HcRWQ3kaR8P/HHiIPH2OKFOizXcUsnpNovS5AU+ObP6qBKHF4+dlZ1XleRqgH8s3AKm41m85ljN2pIAVyFCuFqpdBdHymghTICw2s3wunioj60QNTj3oxkidmg1cH13d/VALvPn0tF2sit5JKEpzrNL+WWralgkuD51hrNqYF5tFtEJcE6X99FTe4/smXwIIgUsFrd15TGovA0AV326PxY6dooMyf/fHnwW+uF3zBF/wd8A/DxpqBKl6bGwAAAABJRU5ErkJggg==\n",
324 | "text/plain": [
325 | ""
326 | ]
327 | },
328 | "execution_count": 10,
329 | "metadata": {},
330 | "output_type": "execute_result"
331 | }
332 | ],
333 | "source": [
334 | "from IPython.core.display import Image\n",
335 | "url = pyqrcode.create(build_tracker_url(org))\n",
336 | "url.png(\"qr.png\", scale=6)\n",
337 | "Image(filename=\"qr.png\")"
338 | ]
339 | }
340 | ],
341 | "metadata": {
342 | "kernelspec": {
343 | "display_name": "Python 3",
344 | "language": "python",
345 | "name": "python3"
346 | },
347 | "language_info": {
348 | "codemirror_mode": {
349 | "name": "ipython",
350 | "version": 3
351 | },
352 | "file_extension": ".py",
353 | "mimetype": "text/x-python",
354 | "name": "python",
355 | "nbconvert_exporter": "python",
356 | "pygments_lexer": "ipython3",
357 | "version": "3.6.8"
358 | }
359 | },
360 | "nbformat": 4,
361 | "nbformat_minor": 2
362 | }
363 |
--------------------------------------------------------------------------------
/notebooks/examples/users.csv:
--------------------------------------------------------------------------------
1 | Email,First Name,Last Name,Username,Password,Role,User Type,Track Viewer
2 | myemail@gmail.com,Jane,Doe,janedoe,password1,Viewer,Viewer,No
3 | myemail2@gmail.com,John,Doe,johndoe,password1,Viewer,Viewer,Yes
4 | myemail3@gmail.com,Bill,Doe,billdoe,password1,Viewer,Viewer,Yes
--------------------------------------------------------------------------------
/readmes/check_edit_location.md:
--------------------------------------------------------------------------------
1 | ## Check the location and time that a worker edited a feature
2 |
3 | This script checks the location and time of when a Workforce assignment, Collector feature, or Survey123 survey was edited against the location of the worker at that same time using Tracker. It is designed to find out if workers are completing work orders without visiting the location.
 4 | It reports features the user might not have visited to the log file or standard output. You provide the feature layer to be validated, the worker names to check, and a date field to perform validation against.
5 |
6 | For example, an admin may use this script to verify whether or not a user was nearby when they completed a Workforce assignment by passing the "completedDate" field and assignments feature layer into the script.
7 |
8 | Location Tracking must be enabled for your organization to use this script. You must be either an admin or a track viewer who can view the tracks of each worker whose work you'd like to verify in order to use this script.
9 |
10 | Supports Python 3.6+
11 |
12 | ----
13 |
14 | Other than the authentication arguments (username, password, org) the script uses the following parameters:
15 |
16 | - -workers \<worker1\>,\<worker2\>,... - A comma-separated list of specific workers to check
17 | - -field-name - The date field name within the feature layer you use to integrate with Tracker. Use the actual field name, not the alias. Default is EditDate (for AGOL)
18 | - -layer-url - The feature service URL for your Survey, Collector, or Workforce assignments feature layer with features to be validated. This is required.
19 | - -time-tolerance \<minutes\> - The time tolerance to use when checking workers' locations. This value is used to provide a range around the time when the assignment was completed (optional - defaults to 10 minutes)
20 | - -distance-tolerance \<meters\> - The distance tolerance to use when checking if a worker completed the assignment at the assignment location (optional - defaults to 100 (m)). The units are in meters.
21 | - -min-accuracy \<meters\> - The minimum accuracy required when querying worker locations (optional - defaults to 50 (m)). The units are in meters.
22 | - -tracks-layer-url \<layer_url\> - The URL to the tracks layer from a Track View you want to utilize (optional - defaults to the tracks layer in your location tracking service)
23 | - -log-file \<file\> - The log file to use for logging messages
24 |
25 | Example Usage 1 - Check whether the three workers (admin_tracker, user_james, and user_aaron) were within 100 meters of the assignment location any time in the 10 minutes before and 10 minutes after the assignment was completed:
26 | ```bash
27 | python check_edit_location.py -u username -p password -org https://arcgis.com -workers admin_tracker,user_james,user_aaron -field-name completedDate -time-tolerance 10 -distance-tolerance 100 -layer-url https://services.arcgis.com/a910db6b36ff4066a3d4131fccc3da9b/arcgis/rest/services/assignments_ad9af2fc00314fa79ce79ec7d7317acc/FeatureServer/0
28 | ```
29 |
30 | Example Usage 2 - Check whether the worker user_aaron, whose tracks lie within the Track View provided, was within 300 meters of a Collector feature he created any time within 3 minutes of creating the feature.
31 | ```bash
32 | python check_edit_location.py -u username -p password -org https://arcgis.com -workers user_aaron -field-name CreationDate -time-tolerance 3 -distance-tolerance 300 -layer-url https://services.arcgis.com/a910db6b36ff4066a3d4131fccc3da9b/arcgis/rest/services/a14fa79ce79ec7d7317acc/FeatureServer/0 -tracks-layer-url https://locationservicesdev.arcgis.com/US6xjA1Nd8bW1aoA/arcgis/rest/services/5bfd7a0a1b6d4b698df17af205b8dbef_Track_View/FeatureServer/0
33 | ```
34 |
35 | ## What it does
36 |
37 | 1. First the script uses the provided credentials to authenticate with AGOL to get the required token
38 | 2. Then the feature layer to be validated is fetched
39 | 3. Then the location feature layers are fetched
40 | 4. For all features that were last edited by a worker in your provided list of workers, check if the worker was within range when your provided field was edited (using the time tolerance, distance tolerance, and minimum accuracy to determine whether they were in range; see the sketch below)
41 |
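For illustration, here is a minimal sketch of the proximity check in step 4, assuming a `tracks_layer` `FeatureLayer` and WGS84 feature coordinates. The function names, field defaults, and the haversine distance calculation are simplified stand-ins, not the script's exact implementation:

```python
# Simplified sketch of the proximity check (not the script's exact code).
# Assumes `tracks_layer` is an arcgis FeatureLayer for a tracks layer and the
# feature's longitude/latitude are already known in WGS84.
from datetime import timedelta
from math import asin, cos, radians, sin, sqrt


def haversine_m(lon1, lat1, lon2, lat2):
    """Approximate great-circle distance in meters between two WGS84 points."""
    lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
    a = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
    return 2 * 6371000 * asin(sqrt(a))


def worker_was_nearby(tracks_layer, worker, edit_time, lon, lat,
                      time_tolerance=10, distance_tolerance=100, min_accuracy=50):
    """Return True if `worker` has an accurate-enough track point within
    `distance_tolerance` meters of (lon, lat) inside the time window."""
    start = (edit_time - timedelta(minutes=time_tolerance)).strftime("%Y-%m-%d %H:%M:%S")
    end = (edit_time + timedelta(minutes=time_tolerance)).strftime("%Y-%m-%d %H:%M:%S")
    where = (f"created_user = '{worker}' AND horizontal_accuracy <= {min_accuracy} "
             f"AND location_timestamp >= timestamp '{start}' "
             f"AND location_timestamp <= timestamp '{end}'")
    tracks = tracks_layer.query(where=where, out_sr=4326, as_df=True)
    if tracks.empty:
        return False
    # The tracks layer returns point geometries; compare each against the feature location
    return any(haversine_m(pt["x"], pt["y"], lon, lat) <= distance_tolerance
               for pt in tracks["SHAPE"])
```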
--------------------------------------------------------------------------------
/readmes/export_tracks.md:
--------------------------------------------------------------------------------
1 | ## Export tracks from ArcGIS Online
2 |
3 | This script provides the ability to export tracks from a location tracking or track view feature service. This script only works for ArcGIS Online. It is designed to be a scheduled task to routinely export track data for storage outside of ArcGIS Online. It generates the CSV file on the server, then downloads it. It supports exporting the last X number of full days of tracks, where a day can be defined in any time zone.
4 |
5 | ----
6 |
7 | The script uses the following parameters:
8 |
 9 | - -username \<username\> - The username to authenticate with
10 | - -password \<password\> - The password to authenticate with
11 | - -org \<url\> - The organization url to sign in with. Defaults to "https://arcgis.com"
12 | - -track-item \<item_id\> - The item id of the location tracking service or track view item. This is required.
13 | - -track-age \<days\> - Number of previous full days of tracks to export. Default is 1.
14 | - -time-zone \<time_zone\> - The time zone that defines a "full day". Time zones are defined by [IANA](https://www.iana.org/time-zones). Defaults to "UTC". Other examples: "America/New_York".
15 | - -output-directory \<directory\> - The directory where the CSV file should be stored.
16 | - -log-file \<file\> - The log file to use for logging messages. Optional
17 |
18 | Example Usage: Last 25 days
19 | ```bash
20 | python export_tracks.py -username username -password password -track-age 25 -track-item 0e84dfc7a2a54bb5a7dfc04197b3fa0b -log-file log.txt -output-directory "/Users/exports" -time-zone "America/New_York"
22 | ```
23 |
24 | ## What it does
25 |
26 | 1. First the script uses the provided credentials to authenticate with ArcGIS Online.
27 | 2. Then the track item is fetched and exported to a new item using the specified relative date range.
28 | 3. Then that item is downloaded to the specified directory and is named using `tracks__.csv`
29 |
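As a rough illustration of steps 2 and 3 (not the script's exact code), the server-side export and download could look like the following with the ArcGIS API for Python; the item id, export title, and paths below are placeholders:

```python
from arcgis.gis import GIS

gis = GIS("https://arcgis.com", "username", "password")

# The location tracking service or track view item (placeholder item id)
track_item = gis.content.get("0e84dfc7a2a54bb5a7dfc04197b3fa0b")

# Export server-side to a temporary CSV item, then download it locally
exported = track_item.export("tracks_export", "CSV", wait=True)
exported.download(save_path="/Users/exports")
exported.delete()  # clean up the temporary exported item
```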
--------------------------------------------------------------------------------
/readmes/generate_users_arcade_expression.md:
--------------------------------------------------------------------------------
1 | ## Generate a user-based arcade expression
2 |
 3 | This script can generate a simple Arcade expression that can be used when visualizing track or LKL data in a map. It takes a CSV file with at least two columns (username, category) and generates an expression to use in a map.
4 |
5 | The expression can be used as part of the layer symbology or as part of a popup configuration to display additional information about the tracked user.
6 |
7 | Supports Python 3.6+
8 |
9 | Example Input CSV:
10 |
11 | | username | category |
12 | |----------|-------------|
13 | | user1 | Medic |
14 | | user2 | Firefighter |
15 |
16 | Resulting Output:
17 | ```
18 | if ($feature.created_user == 'user1') {
19 | return 'Medic'
20 | }
21 |
22 | else if ($feature.created_user == 'user2') {
23 | return 'Firefighter'
24 | }
25 |
26 | return ''
27 | ```
28 |
29 | ----
30 |
31 | The script uses these arguments:
32 | - `--file <file>` - the CSV file to read
33 | - `--username-column <column>` - the column in the CSV file containing the usernames of the tracked users
34 | - `--other-column <column>` - the column in the CSV file containing the values to associate with the users (e.g. a category)
35 |
36 | Example Usage 1 - Printing to console
37 | ```bash
38 | python generate_users_arcade_expression.py --file users.csv --username-column usernames --other-column category
39 | ```
40 |
41 | Example Usage 2 - Direct to file
42 | ```bash
43 | python generate_users_arcade_expression.py --file users.csv --username-column usernames --other-column category > output.txt
44 | ```
45 |
46 | Example Usage 3 - Direct to Clipboard on Mac
47 | ```bash
48 | python generate_users_arcade_expression.py --file users.csv --username-column usernames --other-column category | pbcopy
49 | ```
50 |
51 | Example Usage 4 - Direct to Clipboard on Windows
52 | ```bash
53 | python generate_users_arcade_expression.py --file users.csv --username-column usernames --other-column category | CLIP
54 | ```
55 |
56 | ## What it does
57 |
58 | 1. Reads the provided CSV file
59 | 2. Generates an if/else-if based Arcade expression (a minimal sketch of this is shown below)
60 | 3. Prints the expression
61 |
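A minimal sketch of that generation logic, with argument names following the README (the actual script may differ in its details):

```python
import csv


def build_expression(csv_file, username_column, other_column):
    """Build an if/else-if Arcade expression mapping usernames to a category."""
    lines = []
    with open(csv_file, newline="") as f:
        for i, row in enumerate(csv.DictReader(f)):
            keyword = "if" if i == 0 else "else if"
            lines.append(f"{keyword} ($feature.created_user == '{row[username_column]}') {{")
            lines.append(f"    return '{row[other_column]}'")
            lines.append("}")
    lines.append("return ''")
    return "\n".join(lines)


print(build_expression("users.csv", "username", "category"))
```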
--------------------------------------------------------------------------------
/readmes/mirror_lkl_layer.md:
--------------------------------------------------------------------------------
1 | ## Mirror Last Known Locations layer (LKL) into a standard feature service
2 |
 3 | This script copies data from the Last Known Locations (LKL) layer of a Location Tracking layer (either the Location Tracking Service or a Location Tracking View) and mirrors it into a standard feature service.
4 |
 5 | As of April 2020, the LKL layer does not support dynamic joins, so mirroring data with this script may be useful if you are trying to relate additional data to your last known locations data. Standard feature services support join features.
6 |
7 | Please see the code block below if attempting to use this script in Enterprise.
8 |
9 | This script is designed to be run as a scheduled task, at whatever frequency you want to get updated LKL data from your Location Tracking layer. For example, the admin may set up this script to run every 10 minutes in order to get the latest data from the field.
10 |
11 | For information on how to set up scheduled tasks, please see [this article](https://www.esri.com/arcgis-blog/products/arcgis-pro/analytics/schedule-a-python-script-or-model-to-run-at-a-prescribed-time-2019-update/) and [this article](https://www.esri.com/arcgis-blog/products/product/analytics/scheduling-a-python-script-or-model-to-run-at-a-prescribed-time/).
14 |
15 | This script requires the use of a point layer that the data can be mirrored into. This layer would ideally share all the same fields as a standard LKL layer, but could have fewer fields if not all data is required. You can clone your Location Tracking Service (LTS) using the `clone_items` functionality in the Python API.
16 |
17 | To clone, do:
18 |
19 | ```python
20 | import arcgis
21 | gis = arcgis.gis.GIS("https://arcgis.com","myusername","mypassword")
22 | item = gis.content.get(gis.properties["helperServices"]["locationTracking"]["id"])
23 | cloned_item = gis.content.clone_items([item], copy_data=False)[0]
24 | # delete Tracks layer from your cloned feature layer collection, so that you're left with only the LKL layer
25 | flc = arcgis.features.FeatureLayerCollection(url=cloned_item.url, gis=gis)
26 | flc.manager.delete_from_definition({ "layers" : [{"id" : 0}]})
27 | # remove editor tracking on the layer
28 | flc.manager.update_definition({"editorTrackingInfo":{"enableEditorTracking":"false","enableOwnershipAccessControl":"false","allowOthersToUpdate":"true","allowOthersToDelete":"true","allowOthersToQuery":"true","allowAnonymousToUpdate":"true","allowAnonymousToDelete":"true"}})
29 |
30 | ```
31 | This will create a standard feature service in your organization whose item id you can pass. Note that this will turn off some of the security considerations in your LKL data.
32 |
33 | If you wish, you can also:
34 |
35 | Create a blank point layer (Content > Create > Feature Layer > Build a layer > Points) and then add your requisite fields to that layer. Ensure the field_name value is the same, not just the alias. Ensure you create an extra field named "created_user" if you take this approach.
36 |
37 | Location Tracking must be enabled for your organization to use this script. You must be at least a track viewer to use this script.
38 |
39 | In addition, the user may want to use Python to perform the join itself. While we recommend you use the "Export results of the Join features analysis tool as a hosted feature layer view" option while using AGOL (info about that can be found [here](https://www.esri.com/arcgis-blog/products/arcgis-online/mapping/visualizing-related-data-with-join-features-in-arcgis-online/)), Enterprise does not support this feature as of 10.8. While joining is not supported in the script out of the box, you can easily modify it and perform a left join using the Pandas library.
40 |
41 | Let's take an example where we're joining LKL data to an external layer with a 1:1 relationship. This feature service will store the "Status" of each worker. The join will be on the "Creator" field in each service.
42 | 1. Add extra fields into your layer cloned from above to support the fields from the joined layer
43 | 2. After the applyEdits call has been made in the script (line 101, `mirror_layer.edit_features`), re-query the layer and store it as an SDF
44 | 3. Get external layer and convert to an SDF
45 | 4. Get most recent surveys submitted by each user
46 | 5. Use pandas to perform a left join on the two SDFs
47 | 6. For each user in the joined data, find its corresponding feature and append attributes. Then re-post the data.
48 |
49 | Our code looks something like this:
50 | ```python
51 | # convert LKL to sdf
52 | lkl_sdf = mirror_layer.query('1=1', as_df=True)
53 | status_layer_item = gis.content.get('status_layer_id')
54 | # get SDF of your external layer
55 | status_sdf = status_layer_item.layers[0].query(where='1=1', as_df=True)
56 | # perform merge
57 | overlap_rows = pandas.merge(left = lkl_sdf, right = status_sdf, how='left', left_on='created_user', right_on='Creator')
58 | updated_features = []
59 | features = mirror_layer.query('1=1').features
60 | for feature in features:
61 | # return row with matching created_user
62 | merged_row = overlap_rows.loc[overlap_rows['created_user'] == feature.attributes['created_user']]
63 | # try to update a field "Status", if fails assign not updated
64 | try:
65 | feature.attributes['Status'] = merged_row['Status'].values[0]
66 | except IndexError:
67 | feature.attributes['Status'] = "Not Reported"
68 | # repeat this for every field you want to update
69 | updated_features.append(feature)
70 | # post features with joined data over
71 | updates = mirror_layer.edit_features(updates=updated_features)
72 | ```
73 |
74 | Supports Python 3.6+
75 |
76 | ----
77 |
78 | Other than the authentication arguments (username, password, org) the script uses the following parameters:
79 |
80 | - -item-id - Item id in your portal of the feature service you want to mirror data into. See above for how to create this service. Required
81 | - -lkl-layer-url \<layer_url\> - The URL to the LKL layer from a Track View / Location Tracking Service you want to utilize. This URL should end in /1 (since we're targeting the second layer in the feature collection). Required
82 | - -log-file \<file\> - The log file to use for logging messages. Optional
83 |
84 | Example Usage 1 - Mirror LKL data from the lkl layer url into the layer with the listed item id
85 | ```bash
86 | python mirror_lkl_layer.py -u username -p password -org https://arcgis.com -item-id a05eee7b1cs5461db0e1ef1c1c4abe18 -lkl-layer-url https://locationservices9.arcgis.com/US6xjA1Nc8bW1aoA/arcgis/rest/services/f1087713d8934d5b8218dda736c26af4_Track_View/FeatureServer/1
87 | ```
88 |
89 | ## What it does
90 |
91 | 1. First the script uses the provided credentials to authenticate with AGOL to get the required token
92 | 2. Gets the LKL layer, which is provided to the script
93 | 3. Gets the layer that you are mirroring data into
94 | 4. Checks whether the LKL is already in the mirrored layer or is new
95 | 5. Posts updates with both new and existing features using the `edit_features` functionality in the Python API
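
For illustration, steps 4 and 5 might boil down to an upsert like the following sketch. It assumes `lkl_layer` and `mirror_layer` are `FeatureLayer` objects fetched earlier, and that both layers share a lowercase `objectid` and `created_user` schema; it is not the script's exact code.

```python
# Hypothetical upsert sketch, not the script's exact implementation
lkl_features = lkl_layer.query(where="1=1").features
existing = {f.attributes["created_user"]: f
            for f in mirror_layer.query(where="1=1").features}

adds, updates = [], []
for feature in lkl_features:
    match = existing.get(feature.attributes["created_user"])
    if match:
        # reuse the mirror layer's objectid so edit_features updates the right row
        feature.attributes["objectid"] = match.attributes["objectid"]
        updates.append(feature)
    else:
        adds.append(feature)

if adds or updates:
    mirror_layer.edit_features(adds=adds, updates=updates)
```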
--------------------------------------------------------------------------------
/readmes/polygon_cleanup_tracks.md:
--------------------------------------------------------------------------------
1 | ## Cleanup tracks based on spatial relationship with a polygon feature layer
2 |
3 | This script allows cleanup of track points from a tracks layer based on a spatial relationship to polygon geometry. You provide the script with a feature layer containing polygons, whether or not you want to delete track points inside or outside the polygons, and the script will perform a spatial intersection and delete any necessary features.
4 |
 5 | By default, this script will delete the tracks found inside the polygons. However, by providing the --symmetric-difference parameter, the user can delete tracks falling outside the polygons and preserve the ones inside.
6 |
7 | Location Tracking must be enabled for your organization and you must be an administrator to use this script.
8 |
9 | This script only works with ArcGIS Enterprise.
10 |
11 | Supports Python 3.6+
12 |
13 | ----
14 |
15 | Other than the authentication arguments (username, password, org), the script uses the following parameters:
16 |
17 | - -layer-url - The feature service URL for your polygon feature layer with features that will be spatially intersected with track points. This is required.
18 | - -where - The where clause used to filter out only certain polygons for the spatial comparison (for example, OBJECTID > 1). Defaults to 1=1 (all features are used for comparison)
19 | - --symmetric-difference - When provided, track points that fall outside the polygons are deleted. If not provided, track points inside the polygons are deleted.
20 |
21 | Example Usage 1
22 | ```bash
23 | python polygon_cleanup_tracks.py -u username -p password -org https://arcgis.com --symmetric-difference -where 'OBJECTID > 6' -layer-url https://services.arcgis.com/a910db6b36ff4066a3d4131fccc3da9b/arcgis/rest/services/polygons_ad9af2fc00314fa79ce79ec7d7317acc/FeatureServer/0
24 | ```
25 |
26 | ## What it does
27 |
28 | 1. First the script uses the provided credentials to authenticate with your ArcGIS Enterprise portal to get the required token
29 | 2. Then the location tracking service's tracks layer is fetched
30 | 3. Then the polygon feature layer is fetched and its features are queried using the where clause
31 | 4. The geometry of all the polygons is unioned together
32 | 5. The track points are compared against the unioned polygon geometry
33 | 6. Track points that intersect the polygons (or, with --symmetric-difference, those that do not) are deleted (a rough sketch follows)
34 |
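35 | The non-obvious part is the --symmetric-difference case. Below is a minimal sketch, assuming `gis` is an authenticated connection, `polygon_layer` and `tracks_layer` are illustrative `FeatureLayer` names, and all geometries are in Web Mercator; see `scripts/polygon_cleanup_tracks.py` for the full implementation:
36 | ```python
37 | from arcgis import geometry
38 | 
39 | # union all filtered polygons into one geometry
40 | geometries = [f.geometry for f in polygon_layer.query(where='1=1', out_sr=3857).features]
41 | union_geometry = geometry.union(spatial_ref=3857, geometries=geometries, gis=gis)
42 | 
43 | # invert the polygon: reverse each ring so it becomes a hole, then wrap everything
44 | # in a clockwise ring covering the full Web Mercator extent
45 | for ring in union_geometry['rings']:
46 |     ring.reverse()
47 | world = 20037508.3427892
48 | union_geometry['rings'].append([[-world, -world], [-world, world], [world, world], [world, -world], [-world, -world]])
49 | 
50 | # anything intersecting the inverted polygon lies outside the original polygons, so delete it
51 | intersect_filter = geometry.filters.intersects(union_geometry, sr=3857)
52 | tracks_layer.delete_features(geometry_filter=intersect_filter)
53 | ```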
--------------------------------------------------------------------------------
/scripts/check_edit_location.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2020 Esri
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 | http://www.apache.org/licenses/LICENSE-2.0
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 |
13 | This sample reports work orders (assignments, features, surveys) that were edited or created while the user was not nearby, based on Tracker location data
14 | You must be an admin in your organization to use this script
15 | """
16 | import argparse
17 | import datetime
18 | import logging
19 | import logging.handlers
20 | import pandas
21 | import traceback
22 | import sys
23 | import arcgis
24 | from arcgis.gis import GIS
25 | from arcgis.features import FeatureLayer
26 |
27 |
28 | def initialize_logging(log_file=None):
29 | """
30 | Setup logging
31 | :param log_file: (string) The file to log to
32 | :return: (Logger) a logging instance
33 | """
34 | # initialize logging
35 | formatter = logging.Formatter(
36 | "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()][%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s")
37 | # Grab the root logger
38 | logger = logging.getLogger()
39 | # Set the root logger logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
40 | logger.setLevel(logging.DEBUG)
41 | # Create a handler to print to the console
42 | sh = logging.StreamHandler(sys.stdout)
43 | sh.setFormatter(formatter)
44 | sh.setLevel(logging.INFO)
45 | # Create a handler to log to the specified file
46 | if log_file:
47 | rh = logging.handlers.RotatingFileHandler(log_file, mode='a', maxBytes=10485760)
48 | rh.setFormatter(formatter)
49 | rh.setLevel(logging.DEBUG)
50 | logger.addHandler(rh)
51 | # Add the handlers to the root logger
52 | logger.addHandler(sh)
53 | return logger
54 |
55 |
56 | def return_field_name(layer, name_to_check):
57 | for field in layer.properties.fields:
58 | if field['name'].replace("_", "").lower() == name_to_check.replace("_", "").lower():
59 | return field['name']
60 |
61 |
62 | def get_invalid_work_orders(layer, field_name, time_tolerance, dist_tolerance, min_accuracy, workers, tracks_layer, logger):
63 | """
64 | Finds all invalid work orders by comparing a date vs. worker location
65 | """
66 | # Query for all features last edited by a worker in your list
67 | logger.info("Querying for features edited by a worker in your list")
68 | editor_field = return_field_name(layer, name_to_check="Editor")
69 | object_id_field = return_field_name(layer, name_to_check="OBJECTID")
70 | layer_query_string = ""
71 | if workers:
72 | for worker in workers:
73 | if layer_query_string != "":
74 | layer_query_string = layer_query_string + " OR "
75 | layer_query_string = layer_query_string + f"{editor_field} = '{worker}'"
76 | else:
77 | logger.info("Please pass at least one worker user_id")
78 | sys.exit()
79 | # These are the features whose corresponding editors we will check
80 | sr = {'wkid': 3857, 'latestWkid': 3857}
81 | features_df = layer.query(where=layer_query_string, out_sr=sr, return_all_records=True, as_df=True)
82 | if len(features_df.index) == 0:
83 | logger.info("No features found to check. Please check the user_id's that you have passed")
84 | sys.exit(0)
85 |
86 | # buffer features to use as geometry filter
87 | # Use the geometry service instead of the local geometry engine because there is a bug in the Python API
88 | # when using a shapely geometry as a filter
89 | buffered_geometries = arcgis.geometry.buffer(features_df["SHAPE"].tolist(), in_sr=sr, distances=dist_tolerance + min_accuracy, unit=9001)
90 | features_df["BUFFERED"] = buffered_geometries
91 | features_df.spatial.set_geometry("BUFFERED")
92 | features_df.spatial.sr = sr
93 |
94 | # Set field names
95 | accuracy_field = "horizontal_accuracy"
96 | creator_field = "created_user"
97 | timestamp_field = "location_timestamp"
98 |
99 | # Find invalid features
100 | invalid_features = []
101 | logger.info("Finding invalid features")
102 | for index, row in features_df.iterrows():
103 | # The date to check
104 | try:
105 | # date field may not be populated
106 | if not pandas.isnull(row[field_name]):
107 | date_to_check = row[field_name]
108 | else:
109 | continue
110 | except Exception as e:
111 | logger.info("Check that the exact field name exists in the feature layer")
112 | logger.info(e)
113 | sys.exit(0)
114 |
115 | # Add/Subtract some minutes to give a little leeway
116 | start_date = date_to_check - datetime.timedelta(minutes=time_tolerance)
117 | end_date = date_to_check + datetime.timedelta(minutes=time_tolerance)
118 |
119 | # Check there are actually tracks in your LTS in that time period. Otherwise, go to next feature
120 | check_track_query = f"{timestamp_field} < timestamp '{end_date.strftime('%Y-%m-%d %H:%M:%S')}'"
121 | check_tracks = tracks_layer.query(where=check_track_query, return_count_only=True)
122 | if check_tracks == 0:
123 | logger.info("For this feature, no tracks exist for the time period in your LTS. "
124 | "Ensure that tracks have been retained for the time period you're verifying")
125 | continue
126 |
127 | # Check worker has tracks for that time period
128 | check_worker_tracks_query = f"{timestamp_field} < timestamp '{end_date.strftime('%Y-%m-%d %H:%M:%S')}' AND {creator_field} = '{row[editor_field]}' "
129 | check_worker_tracks = tracks_layer.query(where=check_worker_tracks_query, return_count_only=True)
130 | if check_worker_tracks == 0:
131 | logger.info(f"The worker {row[editor_field]} who edited the feature {row[object_id_field]} does not have tracks for this time period")
132 | continue
133 |
134 | # Make a query string to select location by the worker during the time period
135 | loc_query_string = f"{creator_field} = '{row[editor_field]}' " \
136 | f"AND {timestamp_field} >= timestamp '{start_date.strftime('%Y-%m-%d %H:%M:%S')}' " \
137 | f"AND {timestamp_field} <= timestamp '{end_date.strftime('%Y-%m-%d %H:%M:%S')}' " \
138 |                            f"AND {accuracy_field} <= {min_accuracy}"
139 |
140 | # Generate geometry filter, query the feature layer
141 | geom_filter = arcgis.geometry.filters.intersects(row['BUFFERED'], sr=sr)
142 | tracks_within_buffer = tracks_layer.query(where=loc_query_string, geometry_filter=geom_filter, return_count_only=True)
143 | # each element of the list is a list with two elements - user_id and object_id
144 | if tracks_within_buffer == 0:
145 | invalid_features.append([row[editor_field], row[object_id_field]])
146 | return invalid_features
147 |
148 |
149 | def main(arguments):
150 | # initialize logger
151 | logger = initialize_logging(arguments.log_file)
152 | # Create the GIS
153 | logger.info("Authenticating...")
154 |     # First step is to authenticate and get a valid token
155 | gis = GIS(arguments.org_url,
156 | username=arguments.username,
157 | password=arguments.password,
158 | verify_cert=not arguments.skip_ssl_verification)
159 |
160 | # Get the feature layer
161 | logger.info("Getting feature layer")
162 | layer = FeatureLayer(arguments.layer_url)
163 | logger.info("Getting tracks layer")
164 | if arguments.tracks_layer_url:
165 | tracks_layer = FeatureLayer(url=arguments.tracks_layer_url)
166 | else:
167 | try:
168 | tracks_layer = gis.admin.location_tracking.tracks_layer
169 | except Exception as e:
170 | logger.info(e)
171 | logger.info("Getting location tracking service failed - check that you are an admin and that location tracking is enabled for your organization")
172 | sys.exit(0)
173 |
174 | # Return invalid work orders
175 | workers = arguments.workers.replace(" ", "").split(",")
176 | invalid_work_orders = get_invalid_work_orders(layer,
177 | arguments.field_name,
178 | arguments.time_tolerance,
179 | arguments.distance_tolerance,
180 | arguments.min_accuracy,
181 | workers,
182 | tracks_layer,
183 | logger)
184 | if len(invalid_work_orders) == 0:
185 | logger.info("No features found that match the criteria you've set")
186 | else:
187 | for work_order in invalid_work_orders:
188 | logger.info(f"The user {work_order[0]} who last edited the feature with OBJECTID {work_order[1]} was potentially "
189 | f"not within the distance tolerance when updating the field {arguments.field_name}")
190 |
191 |
192 | if __name__ == "__main__":
193 | # Get all of the commandline arguments
194 | parser = argparse.ArgumentParser("Check that the worker was nearby when editing features")
195 | parser.add_argument('-u', dest='username', help="The username to authenticate with", required=True)
196 | parser.add_argument('-p', dest='password', help="The password to authenticate with", required=True)
197 | parser.add_argument('-org', dest='org_url', help="The url of the org/portal to use", required=True)
198 | # Parameters for tracker
199 | parser.add_argument('-workers', dest='workers', help="Comma separated list of user_id's for the workers to check")
200 | parser.add_argument('-field-name', dest='field_name', default="EditDate",
201 | help="The date field name within the feature layer you use to integrate with Tracker. "
202 | "Use actual field name, not alias. Default is EditDate (for AGOL)")
203 | parser.add_argument('-layer-url', dest='layer_url',
204 | help="The feature service URL for your Survey, Collector, or Workforce assignments feature layer with features to be verified",
205 | required=True)
206 | parser.add_argument('-log-file', dest='log_file', help="The log file to write to (optional)")
207 | parser.add_argument('-time-tolerance', dest='time_tolerance',
208 | help="The tolerance (in minutes) to check a given date field vs location", type=int, default=10)
209 | parser.add_argument('-distance-tolerance', dest='distance_tolerance', type=int, default=100,
210 | help='The distance tolerance to use (in meters)')
211 |     parser.add_argument('-min-accuracy', dest='min_accuracy', default=50, type=int,
212 | help="The minimum accuracy to use (in meters)")
213 | parser.add_argument('-tracks-layer-url', dest='tracks_layer_url', default=None,
214 | help="The tracks layer (either location tracking service or tracks view) you'd like to use. "
215 | "Defaults to the Location Tracking Service tracks layer")
216 | parser.add_argument('--skip-ssl-verification',
217 | dest='skip_ssl_verification',
218 | action='store_true',
219 |                         help="Skip verifying the SSL certificate of the server")
220 | args = parser.parse_args()
221 | try:
222 | main(args)
223 | except Exception as e:
224 | logging.getLogger().critical("Exception detected, script exiting")
225 | logging.getLogger().critical(e)
226 | logging.getLogger().critical(traceback.format_exc().replace("\n", " | "))
227 |
--------------------------------------------------------------------------------
/scripts/export_tracks.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Esri
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 | http://www.apache.org/licenses/LICENSE-2.0
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 |
13 | This sample demonstrates how tracks can be exported from AGOL to CSV files
14 | """
15 | import argparse
16 | import datetime
17 | import pendulum
18 | import logging
19 | import logging.handlers
20 | import os
21 | import traceback
22 | import sys
23 | from arcgis.gis import GIS
24 |
25 |
26 | def initialize_logging(log_file=None):
27 | """
28 | Setup logging
29 | :param log_file: (string) The file to log to
30 | :return: (Logger) a logging instance
31 | """
32 | # initialize logging
33 | formatter = logging.Formatter(
34 | "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()][%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s")
35 | # Grab the root logger
36 | logger = logging.getLogger()
37 | # Set the root logger logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
38 | logger.setLevel(logging.DEBUG)
39 | # Create a handler to print to the console
40 | sh = logging.StreamHandler(sys.stdout)
41 | sh.setFormatter(formatter)
42 | sh.setLevel(logging.INFO)
43 | # Create a handler to log to the specified file
44 | if log_file:
45 | rh = logging.handlers.RotatingFileHandler(log_file, mode='a', maxBytes=10485760)
46 | rh.setFormatter(formatter)
47 | rh.setLevel(logging.DEBUG)
48 | logger.addHandler(rh)
49 | # Add the handlers to the root logger
50 | logger.addHandler(sh)
51 | return logger
52 |
53 |
54 | def main(arguments):
55 | logger = initialize_logging(arguments.log_file)
56 |     save_path = os.path.abspath(arguments.output_directory)
57 | if not os.path.isdir(save_path):
58 | raise Exception(f"Invalid directory: {save_path}")
59 | logger.info("Authenticating...")
60 | # Authenticate to ArcGIS Online
61 | gis = GIS(arguments.org_url,
62 | username=arguments.username,
63 | password=arguments.password)
64 | if gis.properties['isPortal']:
65 | raise Exception("Export is not supported for the location tracking service with ArcGIS Enterprise")
66 | logger.info("Exporting...")
67 |     tracks_item = gis.content.get(arguments.tracks_item)
68 |     if tracks_item is None:
69 |         raise Exception(f"Unable to get item id: {arguments.tracks_item}")
70 | # Create date range using track age
71 | # Always export up through the last full day (intentionally excludes part of current day)
72 |     start_date = pendulum.today(arguments.time_zone) - datetime.timedelta(days=arguments.track_age)
73 |     end_date = pendulum.today(arguments.time_zone).at(23, 59, 59) - datetime.timedelta(days=1)
74 | # Export the tracks
75 | name = f"tracks_{start_date.to_date_string()}_{end_date.to_date_string()}"
76 | csv_item = tracks_item.export(name,
77 | export_format='CSV',
78 | parameters={
79 | "layers": [
80 | {
81 | "id": 0,
82 | "where": f"location_timestamp <= '{end_date.in_tz('UTC').to_datetime_string()}' AND "
83 | f"location_timestamp >= '{start_date.in_tz('UTC').to_datetime_string()}'"
84 | }
85 | ]
86 | }
87 | )
88 | logger.info("Downloading...")
89 | # Download the CSV file
90 | csv_item.download(save_path=save_path, file_name=f"{name}.csv")
91 | # Delete the hosted CSV file
92 | csv_item.delete()
93 | logger.info("Complete")
94 |
95 |
96 | if __name__ == "__main__":
97 | parser = argparse.ArgumentParser(
98 | "This exports tracks from a track view or location tracking service hosted in AGOL")
99 | parser.add_argument('-username', dest='username', help="The username to authenticate with", required=True)
100 | parser.add_argument('-password', dest='password', help="The password to authenticate with", required=True)
101 | parser.add_argument('-org', dest='org_url', help="The url of the org/portal to use", default="https://arcgis.com")
102 | parser.add_argument('-track-item', dest='tracks_item',
103 | help="The location tracking service or track view item to export",
104 | required=True)
105 | parser.add_argument('-track-age', dest='track_age', type=int, help="Number of previous full days of tracks to export", default=1)
106 | parser.add_argument('-time-zone', dest='time_zone', help="The timezone to use", default='UTC')
107 | parser.add_argument('-output-directory', dest='output_directory', help="The directory where the exported file will be stored", required=True)
108 | parser.add_argument('-log-file', dest='log_file', help="The log file to write to (optional)")
109 | args = parser.parse_args()
110 | try:
111 | main(args)
112 | except Exception as e:
113 | logging.getLogger().critical("Exception detected, script exiting")
114 | logging.getLogger().critical(e)
115 | logging.getLogger().critical(traceback.format_exc().replace("\n", " | "))
116 |
--------------------------------------------------------------------------------
/scripts/generate_users_arcade_expression.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2020 Esri
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 | http://www.apache.org/licenses/LICENSE-2.0
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 |
13 | This sample generates an Arcade expression based on a CSV file of usernames, full names, or categories.
14 | """
15 | import argparse
16 | import csv
17 | import textwrap
18 |
19 |
20 | def main(args):
21 | expression = ""
22 | with open(args.file) as f:
23 | reader = csv.DictReader(f, skipinitialspace=True)
24 | for i, row in enumerate(reader):
25 | if i == 0:
26 | expression += textwrap.dedent(f"""
27 | if ($feature.created_user == '{row[args.username_column].strip()}') {{
28 | return '{row[args.other_column].strip()}'
29 | }}
30 | """)
31 | else:
32 | expression += textwrap.dedent(f"""
33 | else if ($feature.created_user == '{row[args.username_column].strip()}') {{
34 | return '{row[args.other_column].strip()}'
35 | }}
36 | """)
37 | expression += textwrap.dedent("""
38 | return ''
39 | """)
40 | print(expression)
41 |
42 |
43 | if __name__ == "__main__":
44 | # Get all of the commandline arguments
45 | parser = argparse.ArgumentParser("Generates an Arcade expression using a CSV file.")
46 | parser.add_argument('--file', '-f', dest='file', help="The file to open", required=True)
47 | parser.add_argument('--username-column', dest='username_column', help="The name of the column containing the usernames", default="username")
48 | parser.add_argument('--other-column', dest='other_column', help="The name of the other column containing the names, categories, or other strings.",
49 | default="category")
50 | args = parser.parse_args()
51 | main(args)
52 |
--------------------------------------------------------------------------------
/scripts/mirror_lkl_layer.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2020 Esri
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 | http://www.apache.org/licenses/LICENSE-2.0
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 |
13 | This sample mirrors LKL data from a Location Tracking Service or Location Tracking View into a standard feature service.
14 | This makes it possible to join the mirrored LKL data with other data.
15 | """
16 | import argparse
17 | import logging
18 | import logging.handlers
19 | import traceback
20 | import sys
21 | from arcgis.gis import GIS
22 | from arcgis.features import FeatureLayer
23 |
24 |
25 | def initialize_logging(log_file=None):
26 | """
27 | Setup logging
28 | :param log_file: (string) The file to log to
29 | :return: (Logger) a logging instance
30 | """
31 | # initialize logging
32 | formatter = logging.Formatter(
33 | "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()][%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s")
34 | # Grab the root logger
35 | logger = logging.getLogger()
36 | # Set the root logger logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
37 | logger.setLevel(logging.DEBUG)
38 | # Create a handler to print to the console
39 | sh = logging.StreamHandler(sys.stdout)
40 | sh.setFormatter(formatter)
41 | sh.setLevel(logging.INFO)
42 | # Create a handler to log to the specified file
43 | if log_file:
44 | rh = logging.handlers.RotatingFileHandler(log_file, mode='a', maxBytes=10485760)
45 | rh.setFormatter(formatter)
46 | rh.setLevel(logging.DEBUG)
47 | logger.addHandler(rh)
48 | # Add the handlers to the root logger
49 | logger.addHandler(sh)
50 | return logger
51 |
52 |
53 | def return_field_name(layer, name_to_check):
54 | for field in layer.properties.fields:
55 | if field['name'].replace("_", "").lower() == name_to_check.replace("_", "").lower():
56 | return field['name']
57 |
58 |
59 | def main(arguments):
60 | # initialize logger
61 | logger = initialize_logging(arguments.log_file)
62 | # Create the GIS
63 | logger.info("Authenticating...")
64 |     # First step is to authenticate and get a valid token
65 | gis = GIS(arguments.org_url,
66 | username=arguments.username,
67 | password=arguments.password,
68 | verify_cert=not arguments.skip_ssl_verification)
69 |
70 | # Get the feature layer
71 |     item = gis.content.get(arguments.item_id)
72 |     if item:
73 |         logger.info("Getting feature layer")
74 | mirror_layer = item.layers[0]
75 | if arguments.lkl_layer_url:
76 | lkl_layer = FeatureLayer(url=arguments.lkl_layer_url)
77 | else:
78 | logger.info("Please pass an LKL layer url!")
79 | sys.exit(0)
80 |
81 | # Query LKL and mirror layer
82 | lkl_fset = lkl_layer.query('1=1', out_sr=3857)
83 | if len(lkl_fset) == 0:
84 | logger.info("No LKLs in your layer yet!")
85 | sys.exit(0)
86 | mirror_fset = mirror_layer.query('1=1', out_sr=3857)
87 |
88 | add_features = []
89 | update_features = []
90 | logger.info("Iterating through current LKL data")
91 | for feature in lkl_fset:
92 | for mirror_feature in mirror_fset:
93 | # use "in" instead of == comparison due to the potential for brackets to be in the GUID field
94 | if mirror_feature.attributes[return_field_name(mirror_layer, "global_id")].lower() in feature.attributes["globalid"].lower():
95 | update_features.append(feature)
96 | break
97 | else:
98 | add_features.append(feature)
99 |
100 | logger.info("Posting updated data to mirrored layer")
101 | mirror_layer.edit_features(adds=add_features, updates=update_features, use_global_ids=True)
102 | logger.info("Completed!")
103 | else:
104 | logger.info("Item not found")
105 |
106 |
107 | if __name__ == "__main__":
108 | # Get all of the commandline arguments
109 | parser = argparse.ArgumentParser("Python script which maintains an exact replica of your LKL layer "
110 | "in a separate feature service, so that data can be joined")
111 | parser.add_argument('-u', dest='username', help="The username to authenticate with", required=True)
112 | parser.add_argument('-p', dest='password', help="The password to authenticate with", required=True)
113 | parser.add_argument('-org', dest='org_url', help="The url of the org/portal to use", required=True)
114 | # Parameters for tracker
115 | parser.add_argument('-item-id', dest='item_id', required=True,
116 | help="The item id of the layer you want the tracks to mirror to")
117 | parser.add_argument('-lkl-layer-url', dest='lkl_layer_url', required=True,
118 | help="The last known location (LKL) layer (either location tracking service or tracks view) you'd like to use. "
119 | "This URL should end in /1")
120 | parser.add_argument('-log-file', dest='log_file', help="The log file to write to (optional)")
121 | parser.add_argument('--skip-ssl-verification',
122 | dest='skip_ssl_verification',
123 | action='store_true',
124 |                         help="Skip verifying the SSL certificate of the server")
125 | args = parser.parse_args()
126 | try:
127 | main(args)
128 | except Exception as e:
129 | logging.getLogger().critical("Exception detected, script exiting")
130 | logging.getLogger().critical(e)
131 | logging.getLogger().critical(traceback.format_exc().replace("\n", " | "))
132 |
--------------------------------------------------------------------------------
/scripts/polygon_cleanup_tracks.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2020 Esri
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 | http://www.apache.org/licenses/LICENSE-2.0
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 |
13 | This sample allows cleanup of track points from a tracks layer based on a spatial relationship to polygon geometry.
14 | Requires being an admin to run this script
15 | """
16 | import argparse
17 | import logging
18 | import logging.handlers
19 | import traceback
20 | import sys
21 | from arcgis.gis import GIS
22 | from arcgis.features import FeatureLayer
23 | from arcgis import geometry
24 |
25 |
26 | def initialize_logging(log_file=None):
27 | """
28 | Setup logging
29 | :param log_file: (string) The file to log to
30 | :return: (Logger) a logging instance
31 | """
32 | # initialize logging
33 | formatter = logging.Formatter(
34 | "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()][%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s")
35 | # Grab the root logger
36 | logger = logging.getLogger()
37 | # Set the root logger logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
38 | logger.setLevel(logging.DEBUG)
39 | # Create a handler to print to the console
40 | sh = logging.StreamHandler(sys.stdout)
41 | sh.setFormatter(formatter)
42 | sh.setLevel(logging.INFO)
43 | # Create a handler to log to the specified file
44 | if log_file:
45 | rh = logging.handlers.RotatingFileHandler(log_file, mode='a', maxBytes=10485760)
46 | rh.setFormatter(formatter)
47 | rh.setLevel(logging.DEBUG)
48 | logger.addHandler(rh)
49 | # Add the handlers to the root logger
50 | logger.addHandler(sh)
51 | return logger
52 |
53 |
54 | def form_donut(rings):
55 |     for ring in rings:  # reverse each ring so it becomes a hole inside the world-extent outer ring appended below
56 | ring.reverse()
57 | # append the max extent as the clockwise outside ring
58 | rings.append([[-20037508.3427892, -20037508.3427892], [-20037508.3427892, 20037508.3427892], [20037508.3427892, 20037508.3427892],
59 | [20037508.3427892, -20037508.3427892], [-20037508.3427892, -20037508.3427892]])
60 | return rings
61 |
62 |
63 | def main(arguments):
64 | # initialize logger
65 | logger = initialize_logging(arguments.log_file)
66 | # Create the GIS
67 | logger.info("Authenticating...")
68 |     # First step is to authenticate and get a valid token
69 | gis = GIS(arguments.org_url,
70 | username=arguments.username,
71 | password=arguments.password,
72 | verify_cert=not arguments.skip_ssl_verification)
73 | if not gis.properties.isPortal:
74 | logger.error("This script only works with ArcGIS Enterprise")
75 | sys.exit(0)
76 |
77 | logger.info("Getting location tracking service")
78 | try:
79 | tracks_layer = gis.admin.location_tracking.tracks_layer
80 | except Exception as e:
81 | logger.info(e)
82 | logger.info("Getting location tracking service failed - check that you are an admin and that location tracking is enabled for your organization")
83 | sys.exit(0)
84 |
85 | logger.info("Getting polygon layer")
86 | try:
87 |         layer = FeatureLayer(url=arguments.layer_url, gis=gis)
88 | _ = layer._lyr_json
89 | except Exception as e:
90 | logger.info(e)
91 | logger.info("Layer could not be found based on given input. Please check your parameters again. Exiting the script")
92 | sys.exit(0)
93 |
94 |     features = layer.query(where=arguments.where, out_sr=3857).features
95 | if len(features) > 0:
96 | geometries = [feature.geometry for feature in features]
97 | logger.info("Unifying geometry data")
98 | union_geometry = geometry.union(spatial_ref=3857, geometries=geometries, gis=gis)
99 |         if arguments.symmetric_difference:
100 | union_geometry['rings'] = form_donut(union_geometry['rings'])
101 | intersect_filter = geometry.filters.intersects(union_geometry, sr=3857)
102 |         logger.info("Deleting features")
103 |         results = tracks_layer.delete_features(geometry_filter=intersect_filter)
104 |         # delete_features returns a result for each feature it attempted to delete
105 |         logger.info("Deleted: " + str(len(results['deleteResults'])) + " tracks")
106 | logger.info("Completed!")
107 |
108 |
109 | if __name__ == "__main__":
110 | # Get all of the commandline arguments
111 | parser = argparse.ArgumentParser(
112 | "This sample allows cleanup of track points from a tracks layer based on a spatial relationship to polygon geometry")
113 | parser.add_argument('-u', dest='username', help="The username to authenticate with", required=True)
114 | parser.add_argument('-p', dest='password', help="The password to authenticate with", required=True)
115 | parser.add_argument('-org', dest='org_url', help="The url of the org/portal to use", required=True)
116 | # Parameters for tracker
117 | parser.add_argument('-layer-url', dest='layer_url',
118 | help="The feature service URL for your layer with the geometry you want to use to delete track points",
119 | required=True)
120 | parser.add_argument('-where', dest='where', help="Query conditions for polygons you want to use in your cleanup. Defaults to all (1=1)", default="1=1")
121 | parser.add_argument('--symmetric-difference', action='store_true', dest='symmetric_difference',
122 | help="If provided, delete the tracks outside the polygon(s). If not provided, delete the tracks inside the polygon")
123 | parser.add_argument('-log-file', dest='log_file', help="The log file to write to (optional)")
124 | parser.add_argument('--skip-ssl-verification',
125 | dest='skip_ssl_verification',
126 | action='store_true',
127 |                         help="Skip verifying the SSL certificate of the server")
128 | args = parser.parse_args()
129 | try:
130 | main(args)
131 | except Exception as e:
132 | logging.getLogger().critical("Exception detected, script exiting")
133 | logging.getLogger().critical(e)
134 | logging.getLogger().critical(traceback.format_exc().replace("\n", " | "))
135 |
--------------------------------------------------------------------------------