├── LICENSE.txt ├── README.md ├── arcade ├── README.MD ├── dashboard │ ├── .gitignore │ └── README.md ├── labeling │ └── .gitignore ├── popup │ ├── .gitignore │ └── get-article-title-from-url-link │ │ ├── README.md │ │ └── script.js └── visualization │ ├── .gitignore │ └── predominance-map-multiple-columns │ ├── README.md │ ├── primary-field-style.js │ └── set-transparency-as-strength-of-predominance.js ├── arcgis-pro ├── .gitignore └── README.MD ├── javascript-api ├── .gitignore └── README.MD ├── python ├── README.MD ├── api │ ├── .gitignore │ ├── add-attribute-index-hosted-feature-layer │ │ ├── Add Attribute Index to a Hosted Feature Layer.ipynb │ │ └── README.md │ ├── arcgis-online-admin │ │ ├── ArcGIS Online Administration.ipynb │ │ ├── README.md │ │ ├── fl_details.csv │ │ ├── item_details.csv │ │ └── user_details.csv │ ├── change-cache-control-hosted-feature-layer │ │ ├── Change the Cache Control for a Hosted Feature Layer.ipynb │ │ └── README.md │ ├── csv-to-hosted-table-service │ │ ├── Publish CSV file as Hosted Feature Layer or Table.ipynb │ │ └── README.md │ ├── delete-fields-from-hosted-feature-layer │ │ ├── Delete Fields from a Hosted Feature Layer.ipynb │ │ └── README.md │ ├── get-portal-usage-by-user │ │ ├── Get Portal Usage by User.ipynb │ │ └── README.md │ ├── ocr-scrape-coordinates-from-image-pixels │ │ ├── README.md │ │ └── ocr-scrpae-coordinates-from-image-pixels.ipynb │ ├── plot-images-from-exif-label-from-wikipedia │ │ ├── README.md │ │ ├── Rome.zip │ │ └── plot-images-from-exif-and-label-with-wikipedia.ipynb │ ├── predict-satellite-imaging-windows │ │ └── README.md │ ├── publish-hosted-feature-layer-not-editable │ │ ├── Publish a Hosted Feature Layer without Editing enabled.ipynb │ │ └── README.md │ ├── scrape-wikipedia-for-external-coordinates │ │ ├── README.md │ │ └── scrape-wikipedia-page-for-external-coordinates.ipynb │ └── update-view-definition-with-polygon-hosted-feature-layer │ │ ├── README.md │ │ └── Update View Definition of a Hosted 
Feature Layer with a Polygon.ipynb ├── arcpy │ ├── .gitignore │ └── dataFrameToTable │ │ ├── README.md │ │ ├── dataFrameToTable.py │ │ └── test_dataFrameToTable.py └── misc │ ├── .gitignore │ ├── dataFrameToDatabase │ ├── README.md │ ├── dataFrameToDatabase.py │ └── test_dataFrameToDatabase.py │ └── fileDownloader │ ├── README.md │ ├── customErrors.py │ ├── downloadManualTest.py │ ├── fileDownloader.py │ └── test_fileDownloader.py ├── r ├── .gitignore └── README.MD ├── runtime-sdk ├── .gitignore └── README.MD └── survey123 ├── .gitignore ├── README.MD ├── forms └── .gitignore └── javascript ├── .gitignore └── IndexedRepeat.js /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License - 2.0 2 | 3 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 4 | 5 | 1. Definitions. 6 | 7 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 8 | 9 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 10 | 11 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control 12 | with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management 13 | of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial 14 | ownership of such entity. 15 | 16 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 17 | 18 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, 19 | and configuration files. 
20 | 21 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to 22 | compiled object code, generated documentation, and conversions to other media types. 23 | 24 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice 25 | that is included in or attached to the work (an example is provided in the Appendix below). 26 | 27 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the 28 | editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes 29 | of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, 30 | the Work and Derivative Works thereof. 31 | 32 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work 33 | or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual 34 | or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of 35 | electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on 36 | electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for 37 | the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing 38 | by the copyright owner as "Not a Contribution." 
39 | 40 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and 41 | subsequently incorporated within the Work. 42 | 43 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, 44 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, 45 | publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 46 | 47 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, 48 | non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, 49 | sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are 50 | necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was 51 | submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work 52 | or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You 53 | under this License for that Work shall terminate as of the date such litigation is filed. 54 | 55 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, 56 | and in Source or Object form, provided that You meet the following conditions: 57 | 58 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and 59 | 60 | 2. 
You must cause any modified files to carry prominent notices stating that You changed the files; and 61 | 62 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices 63 | from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 64 | 65 | 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a 66 | readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the 67 | Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the 68 | Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever 69 | such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 70 | You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, 71 | provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to 72 | Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your 73 | modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with 74 | the conditions stated in this License. 75 | 76 | 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You 77 | to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, 78 | nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 79 | 80 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except 81 | as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 82 | 83 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides 84 | its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, 85 | any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for 86 | determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under 87 | this License. 88 | 89 | 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required 90 | by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, 91 | including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the 92 | use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or 93 | any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 94 | 95 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a 96 | fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting 97 | such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree 98 | to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your 99 | accepting any such warranty or additional liability. 100 | 101 | END OF TERMS AND CONDITIONS 102 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Geospatial-Center-Code 2 | 3 | A repository for helpful scripts created by the Geospatial Center team (and others!). The Geospatial Center is a group of Solution Engineers across different teams within Esri. Through our day-to-day work, we often find ourselves creating helpful scripts for products across the ArcGIS software stack. This repo will hold those scripts as they become available. 
4 | 5 | For more information contact Adam Pfister ([apfister@esri.com](apfister@esri.com)) 6 | 7 | ## Contributing to the best code in the world! 8 | 9 | **If you'd like to contribute, please follow the [example template](https://github.com/Esri/Geospatial-Center-Code/issues/2) for what files and details are needed.** 10 | 11 | [Visual Studio Code](https://code.visualstudio.com/) is a great IDE to use with strong and easy-to-use GitHub integration. Generally, you can follow this workflow for contributing using VSCode: 12 | 13 | - In VSCode 14 | - Cloning the repo 15 | - Making a local copy of the repo on your machine 16 | - Creating a branch 17 | - You’ll make your changes/additions here first 18 | - Adding your script(s) 19 | - Literally creating folders/files for your scripts 20 | - Staging the changes 21 | - For example, if you have 10 files, but only want to add 3 of them, you would “stage” only those 3 files 22 | - Writing a commit message 23 | - A short descriptive message about what’s being added/changed 24 | - Committing your new additions to your new branch 25 | - Move your additions/changes from “staged” to “committed” in your branch only 26 | - Push changes to your new branch to the remote repo on Github.com 27 | - Send the additions/changes from your local machine to the remote repo on GH.com 28 | - This results in creating a remote version of your branch in the remote repo on GH.com 29 | - On Github.com 30 | - Create a Pull Request (commonly referred to as a “PR”) to merge your new branch into the “master” branch 31 | - Create a request that will merge your branch into the “master” branch 32 | - Comments/discussion can happen around this pull request as needed 33 | - Merging your changes/additions into the master branch 34 | - This will merge your branch into the “master” branch and make your script available to all! 35 | - Delete your local and remote branches 36 | 37 | _Esri welcomes contributions from anyone and everyone. 
Please see our [guidelines for contributing](https://github.com/esri/contributing)._ 38 | 39 | ## Issues 40 | 41 | Find a bug or want to request a new feature? Please let us know by submitting an issue. 42 | 43 | ## Licensing 44 | 45 | Copyright 2020 Esri 46 | 47 | Licensed under the Apache License, Version 2.0 (the "License"); 48 | you may not use this file except in compliance with the License. 49 | You may obtain a copy of the License at 50 | 51 | http://www.apache.org/licenses/LICENSE-2.0 52 | 53 | Unless required by applicable law or agreed to in writing, software 54 | distributed under the License is distributed on an "AS IS" BASIS, 55 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 56 | See the License for the specific language governing permissions and 57 | limitations under the License. 58 | 59 | A copy of the license is available in the repository's [LICENSE](LICENSE.txt) file. 60 | -------------------------------------------------------------------------------- /arcade/README.MD: -------------------------------------------------------------------------------- 1 | ## Arcade 2 | 3 | --- 4 | 5 | Arcade is a simple, portable scripting language for creating custom visualizations, labeling expressions, and much more. 6 | 7 | For more infomation, visit the Arcade [Homepage](https://developers.arcgis.com/arcade/). 8 | 9 | This folder will be organized by Arcade [Profile](https://developers.arcgis.com/arcade/guide/profiles/). A **profile** is a context in which an Arcade expression is evaluated and understood. They expect specific inputs and output types depending on the use case. See below for profile descriptions. 10 | 11 | --- 12 | 13 | ### Dashboard 14 | 15 | `dashboard` 16 | 17 | The [Dashboard profile](https://developers.arcgis.com/arcade/guide/profiles/#dashboard) is used for the list and indicator elements in the ArcGIS Dashboards Beta. An expression can be written to customize the way elements render data points. 
It is expected that the script returns a dictionary of name-value pairs specific for the element. 18 | 19 | ### Labeling 20 | 21 | `labeling` 22 | 23 | The [Labeling profile](https://developers.arcgis.com/arcade/guide/profiles/#labeling) allows the map author to write an expression that determines the label to show on the map for each feature. The script evaluates for each label as it is to be drawn. It is expected that the script returns a text value, comprising the label to be drawn. 24 | 25 | ### Popup 26 | 27 | `popup` 28 | 29 | With the [Popup profile](https://developers.arcgis.com/arcade/guide/profiles/#popup), map authors can write expressions that return values for display in the view's popup. Expressions can be referenced in the popup content's text template, field tables, and media charts. When the popup displays, the script will execute using the feature's attributes as variables in the expression. The script should return either text, a number, or date and place it in the table cell or text comprising the popup's content. Values returned for charts will be used as the basis for the data-driven chart. 30 | 31 | ### Visualization 32 | 33 | `visualization` 34 | 35 | The [Visualization profile](https://developers.arcgis.com/arcade/guide/profiles/#visualization) allows the map author to write an expression that evaluates to a value used to drive the visualization. This could be used for a visual variable such as size, or as a value in a class breaks renderer. When the feature is to be drawn, the script will be evaluated. It is expected that the script returns a value specific to the rendering property being calculated (e.g. A number for a class breaks visualization or text for a unique value visualization). 
36 | -------------------------------------------------------------------------------- /arcade/dashboard/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/arcade/dashboard/.gitignore -------------------------------------------------------------------------------- /arcade/dashboard/README.md: -------------------------------------------------------------------------------- 1 | ## Using Arcade in ArcGIS Dashboards 2 | 3 | For more details and examples on using Arcade with ArcGIS Dashboards, read these two blog posts: 4 | 5 | - [Getting started with Arcade in the ArcGIS Dashboards Beta](https://www.esri.com/arcgis-blog/products/ops-dashboard/real-time/getting-started-with-arcade-in-arcgis-dashboards/) 6 | - [Pushing the limits of Arcade in dashboards](https://www.esri.com/arcgis-blog/products/ops-dashboard/real-time/pushing-the-limits-of-arcade-in-dashboards/) 7 | -------------------------------------------------------------------------------- /arcade/labeling/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/arcade/labeling/.gitignore -------------------------------------------------------------------------------- /arcade/popup/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/arcade/popup/.gitignore -------------------------------------------------------------------------------- /arcade/popup/get-article-title-from-url-link/README.md: -------------------------------------------------------------------------------- 1 | ## Get Article Title from URL Link 2 | 3 | Original Author: James Sullivan 4 | 5 | This script is used to extract a news 
article title from a URL (contained in an attribute field, “url”), and was developed specifically for articles cataloged in the [GDELT project](https://www.gdeltproject.org/) and accessed through their API. In order for it to work, the article name must be included in the URL. In general, the script works backwards from the end of the URL, splits on the “/,” identifies the title based on the “-“ character separating the words, does some formatting, then reassembles the title back together. The script will handle a few different formats and unique cases. 6 | 7 | For example: 8 | 9 | https://www.forbesindia.com/article/take-one-big-story-of-the-day/drones-waft-in-a-world-of-virtual-dealmaking/63285/1 10 | 11 | This case is fairly straight forward, and the script will split on the “/” then find the first “-“ and pull the title from that. There isn’t much clean up required. 12 | 13 | https://www.thehour.com/news/article/State-police-ID-couple-in-murder-suicide-15628872.php 14 | 15 | In this case, there’s some additional error checking for the number at the end of the title, “15628872,” that will be identified based on the “.” as not being part of the title and discarded. 16 | 17 | https://www.syracuse.com/business/2020/10/israeli-drone-company-to-open-control-center-in-syracuse.html 18 | 19 | In this example, when the “.” is identified the script will determine that the text connected to it, “syracuse” is actually part of the title and will be held on to. 20 | 21 | https://www.theepochtimes.com/boy-5-sends-baby-yoda-doll-to-front-line-oregon-firefighters-in-case-you-get-lonely_3519946.html 22 | 23 | In this example, the number at the end, “3519946,” is not meant to be part of the title, but is connected by a “_” and not a “-“ like the previous example. The script can handle this as well by replacing all “_” with a “-“ because splitting the title out. 
24 | -------------------------------------------------------------------------------- /arcade/popup/get-article-title-from-url-link/script.js: -------------------------------------------------------------------------------- 1 | var url = $feature.url; 2 | var lowerCaseTitleWords = ['a', 'an', 'of', 'and', 'on', 'the', 'in', 'for', 'from', 'to']; 3 | 4 | function addWord(currentTitle, word) { 5 | var splitWordParts = split(word, '.'); 6 | if (currentTitle != "" && indexof(lowerCaseTitleWords, lower(splitWordParts[0])) > -1) { 7 | currentTitle += lower(splitWordParts[0]) + ' '; 8 | } else if (IsNan(Number(splitWordParts[0])) || (Number(splitWordParts[0])) < 10000) { 9 | currentTitle += Proper(splitWordParts[0]) + ' '; 10 | } 11 | return currentTitle; 12 | } 13 | 14 | var title = ""; 15 | var list = Split(url, '/', -1, true); 16 | 17 | for (var i = count(list) - 1; i >= 0; i--) { 18 | var urlTitle = replace(list[i], "_", "-"); 19 | var title2 = Split(urlTitle, '-'); 20 | 21 | if (count(title2) > 1) { 22 | for (var j = 0; j < count(title2); j++) { 23 | console(title2[j]); 24 | title = addWord(title, title2[j]); 25 | } 26 | return title; 27 | } 28 | } -------------------------------------------------------------------------------- /arcade/visualization/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/arcade/visualization/.gitignore -------------------------------------------------------------------------------- /arcade/visualization/predominance-map-multiple-columns/README.md: -------------------------------------------------------------------------------- 1 | ## Create Predominance Map with Multiple Columns 2 | 3 | Original Author: Bonnie Stayer 4 | 5 | Ever wanted to create a predominance map but had too many columns to consider? I'm working with NAICS data in this example. 
6 | 7 | [View it Live](https://arcg.is/1avjXj) 8 | 9 | ### Considerations: 10 | 11 | I'd be particularly interested to see if someone has more clever way of addressing the highlighted step (how you get the column name back once you've got the highest value). That's the most cumbersome part, and you're actually returning hardcoded text rather than the real column name. 12 | -------------------------------------------------------------------------------- /arcade/visualization/predominance-map-multiple-columns/primary-field-style.js: -------------------------------------------------------------------------------- 1 | //create an array of all the fields you want to consider and sort 2 | // (this defaults to least to greatest) 3 | var industryarray = Sort([ 4 | $feature["Accommodation_and_Food_Services"], 5 | $feature["Administrative_and_Support_and_"], 6 | $feature["Agriculture__Forestry__Fishing_"], 7 | $feature["Arts__Entertainment__and_Recrea"], 8 | $feature.Construction, 9 | $feature["Educational_Services"], 10 | $feature["Finance_and_Insurance"], 11 | $feature["Health_Care_and_Social_Assistan"], 12 | $feature.Information, 13 | $feature["Management_of_Companies_and_Ent"], 14 | $feature.Manufacturing, 15 | $feature.Mining, 16 | $feature["Not_Specified"], 17 | $feature["Other_Services__except_Public_A"], 18 | $feature["Professional__Scientific__and_T"], 19 | $feature["Public_Administration"], 20 | $feature["Real_Estate_Rental_and_Leasing"], 21 | $feature["Retail_Trade"], 22 | $feature["Transportation_and_Warehousing"], 23 | $feature.Utilities, 24 | $feature["Wholesale_Trade"] 25 | ]); 26 | 27 | //reverse the sorting to go from greatest to least, then get the first value 28 | var topvalue = First(Reverse(industryarray)); 29 | 30 | // to get the name of the corresponding field, 31 | // compare the top value to all the fields in question, 32 | // and return the appropriate name 33 | Decode(topvalue, 34 | $feature["Agriculture__Forestry__Fishing_"],'Agriculture, 
Forestry, Fishing, and Hunting', 35 | $feature.Mining,'Mining', 36 | $feature.Utilities,'Utilities', 37 | $feature.Construction,'Construction', 38 | $feature.Manufacturing,'Manufacturing', 39 | $feature["Wholesale_Trade"],'Wholesale Trade', 40 | $feature["Retail_Trade"],'Retail Trade', 41 | $feature["Transportation_and_Warehousing"],'Transportation and Warehousing', 42 | $feature.Information,'Information', 43 | $feature["Finance_and_Insurance"],'Finance and Insurance', 44 | $feature["Real_Estate_Rental_and_Leasing"],'Real Estate Rental and Leasing', 45 | $feature["Professional__Scientific__and_T"],'Professional, Scientific, and Technical Services', 46 | $feature["Management_of_Companies_and_Ent"],'Management of Companies and Enterprises', 47 | $feature["Administrative_and_Support_and_"],'Administrative and Support and Waste Management and Remediation Services', 48 | $feature["Educational_Services"],'Educational Services', 49 | $feature["Health_Care_and_Social_Assistan"],'Health Care and Social Assistance', 50 | $feature["Arts__Entertainment__and_Recrea"],'Arts, Entertainment, and Recreation', 51 | $feature["Accommodation_and_Food_Services"],'Accommodation and Food Services', 52 | $feature["Other_Services__except_Public_A"],'Other Services (except Public Administration)', 53 | $feature["Public_Administration"],'Public Administration','Not Specified'); -------------------------------------------------------------------------------- /arcade/visualization/predominance-map-multiple-columns/set-transparency-as-strength-of-predominance.js: -------------------------------------------------------------------------------- 1 | //start with the same sorted array 2 | var industryarray = Sort([ 3 | $feature["Accommodation_and_Food_Services"], 4 | $feature["Administrative_and_Support_and_"], 5 | $feature["Agriculture__Forestry__Fishing_"], 6 | $feature["Arts__Entertainment__and_Recrea"], 7 | $feature.Construction, 8 | $feature["Educational_Services"], 9 | 
$feature["Finance_and_Insurance"], 10 | $feature["Health_Care_and_Social_Assistan"], 11 | $feature.Information, 12 | $feature["Management_of_Companies_and_Ent"], 13 | $feature.Manufacturing, 14 | $feature.Mining, 15 | $feature["Not_Specified"], 16 | $feature["Other_Services__except_Public_A"], 17 | $feature["Professional__Scientific__and_T"], 18 | $feature["Public_Administration"], 19 | $feature["Real_Estate_Rental_and_Leasing"], 20 | $feature["Retail_Trade"], 21 | $feature["Transportation_and_Warehousing"], 22 | $feature.Utilities, 23 | $feature["Wholesale_Trade"]]); 24 | 25 | //reverse the sorting, then get the top two values; assign each to a variable 26 | var toptwo = Top(Reverse(industryarray),2); 27 | var topvalue = First(toptwo); 28 | var secondvalue = First(Reverse(toptwo)); 29 | 30 | 31 | //calculate how close the second value is to the top value as a percent, 32 | // then subtract to get the percent difference; 33 | // this output is used in setting the transparency 34 | var pctdiff = (secondvalue / topvalue) * 100; 35 | var transparencypct = Round(100 - pctdiff,2); 36 | transparencypct; -------------------------------------------------------------------------------- /arcgis-pro/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/arcgis-pro/.gitignore -------------------------------------------------------------------------------- /arcgis-pro/README.MD: -------------------------------------------------------------------------------- 1 | ## ArcGIS Pro 2 | 3 | ArcGIS Pro is the latest professional desktop GIS application from Esri. With ArcGIS Pro, you can explore, visualize, and analyze data; create 2D maps and 3D scenes; and share your work to your ArcGIS Online or ArcGIS Enterprise portal. The sections below introduce basic elements of the user interface. 
4 | 5 | Extend ArcGIS Pro using the ArcGIS Pro SDK for Microsoft .NET. Develop add-ins and solution configurations to create a custom Pro UI and user experience for your organization. 6 | 7 | For more information, visit the [ArcGIS Pro SDK Homepage](https://pro.arcgis.com/en/pro-app/sdk/). 8 | -------------------------------------------------------------------------------- /javascript-api/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/javascript-api/.gitignore -------------------------------------------------------------------------------- /javascript-api/README.MD: -------------------------------------------------------------------------------- 1 | ## ArcGIS API for JavaScript 2 | 3 | The ArcGIS API for JavaScript allows you to build modern JavaScript web mapping applications with ease. 4 | 5 | For more information, visit the [Homepage](https://developers.arcgis.com/javascript/). 6 | -------------------------------------------------------------------------------- /python/README.MD: -------------------------------------------------------------------------------- 1 | ## Python 2 | 3 | --- 4 | 5 | When developing with Python within the ArcGIS platform, you have two options, **ArcPy**, and/or the **ArcGIS API for Python** 6 | 7 | ### ArcPy 8 | 9 | `arcpy` 10 | 11 | ArcPy is a Python site package that provides a useful and productive way to perform geographic data analysis, data conversion, data management, and map automation with Python. 12 | 13 | This package provides a rich and native Python experience offering code completion (type a keyword and a dot to get a pop-up list of properties and methods supported by that keyword; select one to insert it) and reference documentation for each function, module, and class. 
14 | 15 | For more information, visit the [Homepage](https://pro.arcgis.com/en/pro-app/arcpy/get-started/what-is-arcpy-.htm). 16 | 17 | ### ArcGIS API for Python 18 | 19 | `api` 20 | 21 | The ArcGIS API for Python is a powerful, modern and easy to use Pythonic library to perform GIS visualization and analysis, spatial data management and GIS system administration tasks that can run both in an interactive fashion, as well as using scripts. 22 | 23 | It enables power users, system administrators and developers to leverage the rich SciPy ecosystem for automating their workflows and performing repetitive tasks using scripts. It integrates well with the Jupyter Notebook and enables academics, data scientists, GIS analysts and visualization enthusiasts to share geo-enriched literate programs and reproducible research with others. 24 | 25 | For more information, visit the [Homepage](https://developers.arcgis.com/python/). 26 | 27 | ### Miscellaneous 28 | 29 | `misc` 30 | 31 | For any other Python scripts that may not be directly related to ArcGIS, but are helpful across the framework. 
32 | -------------------------------------------------------------------------------- /python/api/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/python/api/.gitignore -------------------------------------------------------------------------------- /python/api/add-attribute-index-hosted-feature-layer/Add Attribute Index to a Hosted Feature Layer.ipynb: -------------------------------------------------------------------------------- 1 | {"cells":[{"metadata":{},"cell_type":"markdown","source":"## Add Attribute Index to a Hosted Feature Layer\nThis notebook will add an attribute index to a hosted feature layer"},{"metadata":{"trusted":true},"cell_type":"code","source":"from arcgis.gis import GIS\nfrom arcgis.features import FeatureLayer\ngis = GIS(\"home\")","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Get a reference to the item that has the feature layer you want"},{"metadata":{"trusted":true},"cell_type":"code","source":"item = gis.content.get(\"a6e330c275dd421a9a3dda6e8e546f3d\")\nitem","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Create a `FeatureLayer` using its url"},{"metadata":{"trusted":true},"cell_type":"code","source":"fl = FeatureLayer(item['layers'][0].url)\nfl","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Specify the attribute index details"},{"metadata":{"trusted":true},"cell_type":"code","source":"indexes = {\n \"indexes\": [\n {\n \"name\": \"iso3_index\",\n \"fields\": \"iso3\",\n \"isAscending\": True,\n \"isUnique\": False,\n \"description\": \"iso3 country index\"\n }\n ]\n}","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Add the index to the feature layer's definition"},{"metadata":{"trusted":true},"cell_type":"code","source":"res = 
fl.manager.add_to_definition(indexes)\nres","execution_count":null,"outputs":[]}],"metadata":{"kernelspec":{"name":"python3","display_name":"Python 3","language":"python"},"esriNotebookRuntime":{"notebookRuntimeName":"ArcGIS Notebook Python 3 Standard","notebookRuntimeVersion":"4.0"},"language_info":{"name":"python","version":"3.6.10","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat":4,"nbformat_minor":2} -------------------------------------------------------------------------------- /python/api/add-attribute-index-hosted-feature-layer/README.md: -------------------------------------------------------------------------------- 1 | ## Add Attribute Index to a Hosted Feature Layer 2 | 3 | Original Author: Adam Pfister 4 | 5 | This notebook will add an attribute index to a hosted feature layer using the ArcGIS API for Python's `add_to_definition` method. 6 | -------------------------------------------------------------------------------- /python/api/arcgis-online-admin/ArcGIS Online Administration.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''\n", 10 | "# @Title: ArcGIS Online Administration Notebook\n", 11 | "#\n", 12 | "# @Purpose: this notebook is a sample of how the ArcGIS Python\n", 13 | "# API can be used to adminster a portal. Specifically, this\n", 14 | "# notebook generates a report of: \n", 15 | "# 1) items in the portal, including those with the word 'test' in the tag or the title\n", 16 | "# 2) user status based on last login and activities\n", 17 | "# 3) Hosted feature services used in the org\n", 18 | "# The reports update a hosted table connected to a dashboard for interactive exploration. 
\n", 19 | "#\n", 20 | "# @Creator: ckwon@esri.com\n", 21 | "# @Credits: Building upon work done by Geospatial Center SEs\n", 22 | "# @Last Updated: March 2021\n", 23 | "#\n", 24 | "# @Versions: ArcGIS Python API v1.8.4\n", 25 | "#\n", 26 | "# @License: See end of notebook\n", 27 | "'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "# Import Libraries & Connect to the portal" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 2, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "#Import relevant libraries\n", 44 | "import io\n", 45 | "import csv\n", 46 | "import time\n", 47 | "import getpass\n", 48 | "import requests\n", 49 | "import datetime\n", 50 | "import pandas as pd\n", 51 | "from arcgis.features import FeatureLayer\n", 52 | "from arcgis.gis import GIS, Item, User\n", 53 | "from arcgis.gis.admin import License, LicenseManager" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 3, 59 | "metadata": {}, 60 | "outputs": [ 61 | { 62 | "name": "stdin", 63 | "output_type": "stream", 64 | "text": [ 65 | "Password: ············\n" 66 | ] 67 | } 68 | ], 69 | "source": [ 70 | "##REQUIRE USER INPUT##\n", 71 | "#Assign ArcGIS Online organization for administration\n", 72 | "org = 'https://esriaiddev.maps.arcgis.com/home'\n", 73 | "username = 'ckwon_aid'\n", 74 | "password = getpass.getpass(prompt='Password:')" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 4, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "#Connect to the GIS environment\n", 84 | "gis = GIS(url=org, username=username, password=password)" 85 | ] 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "metadata": {}, 90 | "source": [ 91 | "# Item Administration" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 5, 97 | "metadata": {}, 98 | "outputs": [ 99 | { 100 | "data": { 101 | 
"text/html": [ 102 | "
\n", 103 | "\n", 116 | "\n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | "
idtitleownerfullnamesizecreatedmodifiedurl
0c13d470a1c024318ad221d75412649b4SIDW_Forum_Sponsorsssawaya_aidSalim Sawaya111052013-06-02 16:57:52.02021-03-22 19:41:53.0None
1d3bf9513eb6148e99df2436563fe3167SIDW_Forum_Sponsorsssawaya_aidSalim Sawaya573442013-06-02 16:57:55.02013-06-06 06:07:16.0http://services.arcgis.com/LG9Yn2oFqZi5PnO5/ar...
28bfad945962f4d40b0ae7ce7f74293e3SID-W 2013 Sponsors HQsssawaya_aidSalim Sawaya27812013-06-02 17:09:41.02013-06-06 17:02:16.0None
3b6f3ab78228f45578427ee1f055b0995SID-W 2013 Sponsorsssawaya_aidSalim Sawaya4882013-06-02 17:11:19.02013-06-02 21:19:29.0http://esriaiddev.maps.arcgis.com/apps/SocialM...
4cb2cbb56c69746b0b5b29753c6bf404bAgMarketFinderssawaya_aidSalim Sawaya02013-06-03 01:41:47.02013-06-03 05:45:51.0http://marketfinder.info/
...........................
539876fc35d2fac04119a1a8a684f216299cEnvironmental Impact Public Commentajenkins_EsriAidDevAdam Jenkins37212021-03-26 15:04:48.02021-03-26 15:09:18.0https://EsriAidDev.maps.arcgis.com/apps/Crowds...
5399dba564a23ac84b90bc4cfd2fc287a39fGlobal Stream Flow Web Map-AJajenkins_EsriAidDevAdam Jenkins30862021-03-26 19:20:39.02021-03-26 19:20:43.0None
54002edf8faf5cc84269b6aad2d930f4d0abStreamajenkins_EsriAidDevAdam Jenkins6702021-03-26 19:23:23.02021-03-26 19:24:53.0https://esriaiddev.maps.arcgis.com/apps/instan...
5401c40a3e5e9db3427b9804c836335c4a54Test Exampleajenkins_EsriAidDevAdam Jenkins9912021-03-26 19:26:35.02021-03-26 19:27:02.0https://esriaiddev.maps.arcgis.com/apps/instan...
54027095acc7482d4ca88f16febaab0a81beEsri Aid & Dev Feature Service Management Dash...ckwon_aidCalvin Kwon343142021-03-26 19:48:40.02021-03-28 19:19:07.0None
\n", 254 | "

5403 rows × 8 columns

\n", 255 | "
" 256 | ], 257 | "text/plain": [ 258 | " id \\\n", 259 | "0 c13d470a1c024318ad221d75412649b4 \n", 260 | "1 d3bf9513eb6148e99df2436563fe3167 \n", 261 | "2 8bfad945962f4d40b0ae7ce7f74293e3 \n", 262 | "3 b6f3ab78228f45578427ee1f055b0995 \n", 263 | "4 cb2cbb56c69746b0b5b29753c6bf404b \n", 264 | "... ... \n", 265 | "5398 76fc35d2fac04119a1a8a684f216299c \n", 266 | "5399 dba564a23ac84b90bc4cfd2fc287a39f \n", 267 | "5400 2edf8faf5cc84269b6aad2d930f4d0ab \n", 268 | "5401 c40a3e5e9db3427b9804c836335c4a54 \n", 269 | "5402 7095acc7482d4ca88f16febaab0a81be \n", 270 | "\n", 271 | " title owner \\\n", 272 | "0 SIDW_Forum_Sponsors ssawaya_aid \n", 273 | "1 SIDW_Forum_Sponsors ssawaya_aid \n", 274 | "2 SID-W 2013 Sponsors HQs ssawaya_aid \n", 275 | "3 SID-W 2013 Sponsors ssawaya_aid \n", 276 | "4 AgMarketFinder ssawaya_aid \n", 277 | "... ... ... \n", 278 | "5398 Environmental Impact Public Comment ajenkins_EsriAidDev \n", 279 | "5399 Global Stream Flow Web Map-AJ ajenkins_EsriAidDev \n", 280 | "5400 Stream ajenkins_EsriAidDev \n", 281 | "5401 Test Example ajenkins_EsriAidDev \n", 282 | "5402 Esri Aid & Dev Feature Service Management Dash... ckwon_aid \n", 283 | "\n", 284 | " fullname size created modified \\\n", 285 | "0 Salim Sawaya 11105 2013-06-02 16:57:52.0 2021-03-22 19:41:53.0 \n", 286 | "1 Salim Sawaya 57344 2013-06-02 16:57:55.0 2013-06-06 06:07:16.0 \n", 287 | "2 Salim Sawaya 2781 2013-06-02 17:09:41.0 2013-06-06 17:02:16.0 \n", 288 | "3 Salim Sawaya 488 2013-06-02 17:11:19.0 2013-06-02 21:19:29.0 \n", 289 | "4 Salim Sawaya 0 2013-06-03 01:41:47.0 2013-06-03 05:45:51.0 \n", 290 | "... ... ... ... ... 
\n", 291 | "5398 Adam Jenkins 3721 2021-03-26 15:04:48.0 2021-03-26 15:09:18.0 \n", 292 | "5399 Adam Jenkins 3086 2021-03-26 19:20:39.0 2021-03-26 19:20:43.0 \n", 293 | "5400 Adam Jenkins 670 2021-03-26 19:23:23.0 2021-03-26 19:24:53.0 \n", 294 | "5401 Adam Jenkins 991 2021-03-26 19:26:35.0 2021-03-26 19:27:02.0 \n", 295 | "5402 Calvin Kwon 34314 2021-03-26 19:48:40.0 2021-03-28 19:19:07.0 \n", 296 | "\n", 297 | " url \n", 298 | "0 None \n", 299 | "1 http://services.arcgis.com/LG9Yn2oFqZi5PnO5/ar... \n", 300 | "2 None \n", 301 | "3 http://esriaiddev.maps.arcgis.com/apps/SocialM... \n", 302 | "4 http://marketfinder.info/ \n", 303 | "... ... \n", 304 | "5398 https://EsriAidDev.maps.arcgis.com/apps/Crowds... \n", 305 | "5399 None \n", 306 | "5400 https://esriaiddev.maps.arcgis.com/apps/instan... \n", 307 | "5401 https://esriaiddev.maps.arcgis.com/apps/instan... \n", 308 | "5402 None \n", 309 | "\n", 310 | "[5403 rows x 8 columns]" 311 | ] 312 | }, 313 | "execution_count": 5, 314 | "metadata": {}, 315 | "output_type": "execute_result" 316 | } 317 | ], 318 | "source": [ 319 | "#From the REST URL, request all items in the organization. \n", 320 | "#Create a dictionary of the results, and print out the output. 
\n", 321 | "url = f'{gis.url}/sharing/rest/content/portals/{gis.properties.id}'\n", 322 | "\n", 323 | "params = {\n", 324 | " 'f': 'csv',\n", 325 | " 'token': gis._portal.con.token\n", 326 | "}\n", 327 | "\n", 328 | "#Get a string response from the request and construct a DataFrame\n", 329 | "csv_out = requests.get(url, params=params).text\n", 330 | "df = pd.read_csv(io.StringIO(str(csv_out)))\n", 331 | "\n", 332 | "#Replace NaN values in the df as presence of NaN results in error when adding features to the feature table\n", 333 | "df_filled = df.fillna('None')\n", 334 | "df_filled" 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "execution_count": 6, 340 | "metadata": {}, 341 | "outputs": [ 342 | { 343 | "data": { 344 | "text/plain": [ 345 | "[{'attributes': {'id': 'c13d470a1c024318ad221d75412649b4',\n", 346 | " 'title': 'SIDW_Forum_Sponsors',\n", 347 | " 'owner': 'ssawaya_aid',\n", 348 | " 'fullname': 'Salim Sawaya',\n", 349 | " 'created': '2013-06-02 16:57:52.0',\n", 350 | " 'modified': '2021-03-22 19:41:53.0',\n", 351 | " 'url': 'None',\n", 352 | " 'size': 11105,\n", 353 | " 'size_mb': '0.011105',\n", 354 | " 'access': 'private',\n", 355 | " 'type': 'CSV',\n", 356 | " 'snippet': None,\n", 357 | " 'categories': '',\n", 358 | " 'numComments': 0,\n", 359 | " 'numRatings': 0,\n", 360 | " 'numViews': 1,\n", 361 | " 'scoreCompleteness': 33,\n", 362 | " 'tags': '',\n", 363 | " 'tag_status': 'no'}}]" 364 | ] 365 | }, 366 | "execution_count": 6, 367 | "metadata": {}, 368 | "output_type": "execute_result" 369 | } 370 | ], 371 | "source": [ 372 | "#Iterate through each record, add new item attributes, and determine if 'test' exists in the title & the tag.\n", 373 | "\n", 374 | "f_item = []\n", 375 | "attributeMap = ['id','title','owner','fullname','created','modified','url','size']\n", 376 | "\n", 377 | "for index, row in df_filled.iterrows():\n", 378 | " #Create an empty dictionary for each item\n", 379 | " f = {\"attributes\":{}}\n", 380 | " \n", 381 | " 
#Write the row of each record to the dictionary\n", 382 | " for field in attributeMap:\n", 383 | " f['attributes'][field] = row[field]\n", 384 | " \n", 385 | " #Then convert size in bytes to megabytes, format to six decimal places & update\n", 386 | " size_mb = int(row['size'])/1000000\n", 387 | " f['attributes']['size_mb'] = \"{:.6f}\".format(size_mb)\n", 388 | " \n", 389 | " #Get the item and include additional fields\n", 390 | " #Documentation on available fields: https://developers.arcgis.com/rest/users-groups-and-items/common-parameters.htm#ESRI_SECTION1_1FFBA7FE775B4BDA8D97524A6B9F7C98\n", 391 | " item = gis.content.get(itemid=row['id'])\n", 392 | " f['attributes']['access'] = item['access']\n", 393 | " f['attributes']['type'] = item['type']\n", 394 | " f['attributes']['snippet'] = item['snippet']\n", 395 | " f['attributes']['categories'] = str(item['categories']).strip('[]') #Convert to string & remove the []. A list cannot be inserted into a string field in the hosted table\n", 396 | " f['attributes']['numComments'] = item['numComments']\n", 397 | " f['attributes']['numRatings'] = item['numRatings']\n", 398 | " f['attributes']['numViews'] = item['numViews']\n", 399 | " f['attributes']['scoreCompleteness'] = item['scoreCompleteness']\n", 400 | " f['attributes']['tags'] = str(item['tags']).strip('[]') #Convert to string & remove the []. A list cannot be inserted into a string field in the hosted table\n", 401 | " \n", 402 | " #For each item, check if the title or tag contains the word 'test'\n", 403 | " #First check if the item has a title. If it does not, indicate the item. 
\n", 404 | " #Then check if the title has the word 'test,' followed by the tag\n", 405 | " #Assign a variable for title, tag, and id to simplify logic below\n", 406 | " item_title = f['attributes']['title']\n", 407 | " item_tag = f['attributes']['tags']\n", 408 | " item_id = f['attributes']['id']\n", 409 | " \n", 410 | " if isinstance(item_title, float): \n", 411 | " print(f'Item ID:{item_id} does not have a valid title')\n", 412 | " else: \n", 413 | " if 'test' in item_title:\n", 414 | " f['attributes']['tag_status'] = 'yes'\n", 415 | " elif item_tag != '':\n", 416 | " if 'test' in item_tag:\n", 417 | " f['attributes']['tag_status'] = 'yes'\n", 418 | " else:\n", 419 | " f['attributes']['tag_status'] = 'no' \n", 420 | " else:\n", 421 | " f['attributes']['tag_status'] = 'no' \n", 422 | " \n", 423 | " #Append the record to the list \n", 424 | " f_item.append(f)\n", 425 | "\n", 426 | "#Display the first 5 items as output\n", 427 | "f_item[:1]" 428 | ] 429 | }, 430 | { 431 | "cell_type": "code", 432 | "execution_count": 7, 433 | "metadata": {}, 434 | "outputs": [ 435 | { 436 | "name": "stdout", 437 | "output_type": "stream", 438 | "text": [ 439 | "5403 items were checked...\n", 440 | "267 items have the word \"test\" in the title or the tag.\n" 441 | ] 442 | } 443 | ], 444 | "source": [ 445 | "#Count the number of items where tag_status = 'yes' for reporting purposes \n", 446 | "count_yes = 0\n", 447 | "count_no = 0\n", 448 | "\n", 449 | "for record in f_item:\n", 450 | " tag_status = record.get('attributes',{}).get('tag_status')\n", 451 | " if tag_status == 'yes':\n", 452 | " count_yes += 1\n", 453 | " else:\n", 454 | " count_no += 1\n", 455 | "\n", 456 | "count_total = count_yes + count_no\n", 457 | " \n", 458 | "print(f'{count_total} items were checked...')\n", 459 | "print(f'{count_yes} items have the word \"test\" in the title or the tag.')" 460 | ] 461 | }, 462 | { 463 | "cell_type": "code", 464 | "execution_count": 8, 465 | "metadata": {}, 466 | "outputs": [ 467 
| { 468 | "data": { 469 | "text/plain": [ 470 | "" 471 | ] 472 | }, 473 | "execution_count": 8, 474 | "metadata": {}, 475 | "output_type": "execute_result" 476 | } 477 | ], 478 | "source": [ 479 | "##REQUIRE USER INPUT##\n", 480 | "#Select the Hosted Table for update\n", 481 | "\n", 482 | "#If this is the first time running the notebook, create the hosted table with the following CSV. \n", 483 | "#https://esriis-my.sharepoint.com/:x:/g/personal/cal10660_esri_com/EYi8huq-F25AipgAONwVK5IB5NOOIFop9WaHXteq9TGJSw?e=ZCdFNa\n", 484 | "\n", 485 | "#Make sure to set the field types correctly.\n", 486 | "#String: id, title, owner, fullname, url, access, type, snippet, categories, tags, tag_status\n", 487 | "#Integer: numComments, numRatings, numViews, scoreCompleteness\n", 488 | "#Double: size, size_mb\n", 489 | "#Date: created, modified\n", 490 | "\n", 491 | "item_id = '4602725d51234a7ba6887b5b491d3b0e'\n", 492 | "dest_fl = gis.content.get(item_id).tables[0]\n", 493 | "dest_fl" 494 | ] 495 | }, 496 | { 497 | "cell_type": "code", 498 | "execution_count": 9, 499 | "metadata": {}, 500 | "outputs": [ 501 | { 502 | "name": "stdout", 503 | "output_type": "stream", 504 | "text": [ 505 | "success...\n" 506 | ] 507 | } 508 | ], 509 | "source": [ 510 | "#Update the Hosted Table with the item details\n", 511 | "dest_fl.delete_features(where=\"1=1\")\n", 512 | "result = dest_fl.edit_features(adds=f_item)\n", 513 | "print('success...')" 514 | ] 515 | }, 516 | { 517 | "cell_type": "markdown", 518 | "metadata": {}, 519 | "source": [ 520 | "# User Administration" 521 | ] 522 | }, 523 | { 524 | "cell_type": "code", 525 | "execution_count": 10, 526 | "metadata": {}, 527 | "outputs": [ 528 | { 529 | "name": "stdout", 530 | "output_type": "stream", 531 | "text": [ 532 | "95 users found...\n" 533 | ] 534 | } 535 | ], 536 | "source": [ 537 | "#Search for users in the organization \n", 538 | "search = 1000 #Make sure this value exceeds the no. 
of users in the org\n", 539 | "users_all = gis.users.search(max_users=search)\n", 540 | "users_count = len(users_all)\n", 541 | "print(f'{users_count} users found...')" 542 | ] 543 | }, 544 | { 545 | "cell_type": "code", 546 | "execution_count": 11, 547 | "metadata": {}, 548 | "outputs": [ 549 | { 550 | "data": { 551 | "text/plain": [ 552 | "[{'attributes': {'username': 'AGiron_aid',\n", 553 | " 'id': 'a64c4d9e27704dc2929b83a68947b436',\n", 554 | " 'fullName': 'Amanda Giron',\n", 555 | " 'availableCredits': 1000.0,\n", 556 | " 'assignedCredits': 1000.0,\n", 557 | " 'preferredView': None,\n", 558 | " 'email': 'AGiron@esri.com',\n", 559 | " 'lastLogin': 1607967224000,\n", 560 | " 'mfaEnabled': False,\n", 561 | " 'access': 'org',\n", 562 | " 'orgId': 'LG9Yn2oFqZi5PnO5',\n", 563 | " 'role': 'org_publisher',\n", 564 | " 'user_privileges': 'edit, bulkPublishFromDataStores, publishDynamicImagery, publishFeatures, publishScenes, publishServerServices, publishTiledImagery, publishTiles, registerDataStores, categorizeItems, createGroup, createItem, joinGroup, joinNonOrgGroup, shareGroupToOrg, shareGroupToPublic, shareToGroup, shareToOrg, shareToPublic, viewOrgGroups, viewOrgItems, viewOrgUsers, geoanalytics, demographics, elevation, featurereport, geocode, stored, temporary, geoenrichment, networkanalysis, closestfacility, locationallocation, optimizedrouting, origindestinationcostmatrix, routing, servicearea, vehiclerouting, spatialanalysis',\n", 565 | " 'userLicenseTypeId': 'advancedUT',\n", 566 | " 'disabled': False,\n", 567 | " 'region': 'WO',\n", 568 | " 'thumbnail': None,\n", 569 | " 'created': 1600455347000,\n", 570 | " 'modified': 1607967239000,\n", 571 | " 'groups': 1,\n", 572 | " 'provider': 'arcgis',\n", 573 | " 'item': 0,\n", 574 | " 'myEsri': 'arcgisonly'}}]" 575 | ] 576 | }, 577 | "execution_count": 11, 578 | "metadata": {}, 579 | "output_type": "execute_result" 580 | } 581 | ], 582 | "source": [ 583 | "#Iterate through each user, determine various properties, 
and create a list of dictionary\n", 584 | "#Refer to properties of User Object: https://developers.arcgis.com/rest/users-groups-and-items/user.htm\n", 585 | "f_user = []\n", 586 | "attributeMap = ['username','id','fullName','availableCredits','assignedCredits','preferredView', 'email','lastLogin','mfaEnabled','access',\n", 587 | " 'orgId','role','privileges','userLicenseTypeId','disabled', 'region','thumbnail','created','modified','groups','provider']\n", 588 | "\n", 589 | "for user in users_all:\n", 590 | " f = {\"attributes\":{}}\n", 591 | " for field in attributeMap: \n", 592 | " f['attributes'][field] = user[field]\n", 593 | " \n", 594 | " #Format privileges and add to the dictionary\n", 595 | " if field == 'privileges':\n", 596 | " privilege = user[field]\n", 597 | " privilege_ls = []\n", 598 | " i = 0\n", 599 | " while i < len(user[field]): #Since privileges assigned varies with each user, find the length of the list.\n", 600 | " privilege_str = user[field][i] #Take the i-th item in the list \n", 601 | " privilege_name = privilege_str.rsplit(':',1)[1] #Format from [portal]:[usertype]:[privilege] to [privilege]\n", 602 | " privilege_ls.append(privilege_name) #Append the formatted string to a list\n", 603 | " i = i + 1\n", 604 | " f['attributes']['user_privileges'] = str(privilege_ls).strip('[]').replace('\\'','') #Remove the brackets and quotation mark to insert the string to the dictionary.\n", 605 | " del f['attributes'][field] #Privilege is a SQL keyword so change to user_privileges. 
Delete Privileges key from the dict.\n", 606 | " \n", 607 | " #Count the number of groups\n", 608 | " if field == 'groups':\n", 609 | " group_count = len(user[field])\n", 610 | " f['attributes'][field] = group_count\n", 611 | " \n", 612 | " #Count the number of items owned by each user\n", 613 | " item_count = len(user.items(max_items = 1000)) #Max_item is defaulted to 100\n", 614 | " f['attributes']['item'] = item_count\n", 615 | " \n", 616 | " #Determine My Esri access status\n", 617 | " myesri_access = user.esri_access\n", 618 | " f['attributes']['myEsri'] = myesri_access\n", 619 | " \n", 620 | " #Append the record to the list \n", 621 | " f_user.append(f)\n", 622 | "\n", 623 | "#Display the first 5 items as output\n", 624 | "f_user[:1]" 625 | ] 626 | }, 627 | { 628 | "cell_type": "code", 629 | "execution_count": 12, 630 | "metadata": {}, 631 | "outputs": [ 632 | { 633 | "data": { 634 | "text/plain": [ 635 | "
" 636 | ] 637 | }, 638 | "execution_count": 12, 639 | "metadata": {}, 640 | "output_type": "execute_result" 641 | } 642 | ], 643 | "source": [ 644 | "##REQUIRE USER INPUT##\n", 645 | "#Select the Hosted Table for update\n", 646 | "\n", 647 | "#If this is the first time running the notebook, create the hosted table with the following CSV. \n", 648 | "#https://esriis-my.sharepoint.com/:x:/g/personal/cal10660_esri_com/EUPUqn1jvk9BptlGbTPmOEYBVuJOvsAUHjy5tsBXuLGimw?e=adOeGU\n", 649 | "\n", 650 | "#Make sure to set the field types correctly.\n", 651 | "#String: username, id, fullName, preferredView, email, mfaEnabled, access, orgId, role, privileges, userLicenseTypeId, disabled, region, thumbnail, provider, myEsri\n", 652 | "#Integer: storageUsage, storageQuota, groups, item\n", 653 | "#Double: availableCredits, assignedCredits\n", 654 | "#Date: lastLogin, created, modified\n", 655 | "\n", 656 | "item_id = '93c7b07deea146e58b42ca154f439a5e'\n", 657 | "dest_fl = gis.content.get(item_id).tables[0]\n", 658 | "dest_fl" 659 | ] 660 | }, 661 | { 662 | "cell_type": "code", 663 | "execution_count": 13, 664 | "metadata": {}, 665 | "outputs": [ 666 | { 667 | "name": "stdout", 668 | "output_type": "stream", 669 | "text": [ 670 | "success...\n" 671 | ] 672 | } 673 | ], 674 | "source": [ 675 | "#Update the Hosted Table with the item details\n", 676 | "dest_fl.delete_features(where=\"1=1\")\n", 677 | "result = dest_fl.edit_features(adds=f_user)\n", 678 | "print('success...')" 679 | ] 680 | }, 681 | { 682 | "cell_type": "markdown", 683 | "metadata": {}, 684 | "source": [ 685 | "# Hosted Feature Service Tracking" 686 | ] 687 | }, 688 | { 689 | "cell_type": "code", 690 | "execution_count": 14, 691 | "metadata": {}, 692 | "outputs": [], 693 | "source": [ 694 | "# @Purpose: Hosted feature service is the most versatile and frequently used type of layer in ArcGIS Online.\n", 695 | "# But with the versatility comes cost storing the layers and they are generally the leading source of credit 
consumption.\n", 696 | "# This notebook is meant to be run once a day to keep a running history of feature service usage in the organization \n", 697 | "# in a format that is easy to digest and explore, enabling the administrator to maintain oversight of usage. " 698 | ] 699 | }, 700 | { 701 | "cell_type": "code", 702 | "execution_count": 15, 703 | "metadata": {}, 704 | "outputs": [ 705 | { 706 | "name": "stdout", 707 | "output_type": "stream", 708 | "text": [ 709 | "1246 feature layers found in the organization...\n" 710 | ] 711 | }, 712 | { 713 | "data": { 714 | "text/plain": [ 715 | "[{'attributes': {'id': 'd3bf9513eb6148e99df2436563fe3167',\n", 716 | " 'title': 'SIDW_Forum_Sponsors',\n", 717 | " 'owner': 'ssawaya_aid',\n", 718 | " 'fullname': 'Salim Sawaya',\n", 719 | " 'created': '2013-06-02 16:57:55.0',\n", 720 | " 'modified': '2013-06-06 06:07:16.0',\n", 721 | " 'url': 'http://services.arcgis.com/LG9Yn2oFqZi5PnO5/arcgis/rest/services/SIDW_Forum_Sponsors/FeatureServer',\n", 722 | " 'size': 57344,\n", 723 | " 'size_mb': '0.057344',\n", 724 | " 'access': 'public',\n", 725 | " 'type': 'Feature Service',\n", 726 | " 'snippet': '',\n", 727 | " 'categories': '',\n", 728 | " 'numComments': 0,\n", 729 | " 'numRatings': 0,\n", 730 | " 'numViews': 170,\n", 731 | " 'scoreCompleteness': 33,\n", 732 | " 'tags': \"'SIDW'\",\n", 733 | " 'tag_status': 'no',\n", 734 | " 'date_updated': '2021/03/29',\n", 735 | " 'credit_cost': 0.01376256,\n", 736 | " 'remaining_credits': 50571.32,\n", 737 | " 'last_login': 1611754636000}}]" 738 | ] 739 | }, 740 | "execution_count": 15, 741 | "metadata": {}, 742 | "output_type": "execute_result" 743 | } 744 | ], 745 | "source": [ 746 | "#Take the f_item generated from the previous steps and create a new list with type = Feature Service\n", 747 | "fl_lst = []\n", 748 | "date_today = datetime.datetime.today().strftime('%Y/%m/%d') #Date when the code is run \n", 749 | "\n", 750 | "for items in f_item:\n", 751 | " if items['attributes']['type'] == 
'Feature Service':\n", 752 | " items['attributes']['date_updated'] = date_today\n", 753 | " items['attributes']['credit_cost'] = float(items['attributes']['size_mb']) * .24 #Convert storage to credit cost\n", 754 | " items['attributes']['remaining_credits'] = gis.admin.credits.credits #Get the remaining credits in the organization\n", 755 | " fl_lst.append(items)\n", 756 | "\n", 757 | "#Add the user's last login information to the item\n", 758 | "for fl in fl_lst:\n", 759 | " item_owner = fl['attributes']['owner']\n", 760 | " \n", 761 | " for user in f_user:\n", 762 | " username = user['attributes']['username'] \n", 763 | " \n", 764 | " if item_owner == username:\n", 765 | " fl['attributes']['last_login'] = user['attributes']['lastLogin'] \n", 766 | "\n", 767 | "#Summarize the results\n", 768 | "fl_count = len(fl_lst)\n", 769 | "print(f'{fl_count} feature layers found in the organization...')\n", 770 | "fl_lst[:1]" 771 | ] 772 | }, 773 | { 774 | "cell_type": "code", 775 | "execution_count": 16, 776 | "metadata": {}, 777 | "outputs": [ 778 | { 779 | "data": { 780 | "text/plain": [ 781 | "
" 782 | ] 783 | }, 784 | "execution_count": 16, 785 | "metadata": {}, 786 | "output_type": "execute_result" 787 | } 788 | ], 789 | "source": [ 790 | "##REQUIRE USER INPUT##\n", 791 | "#Select the Hosted Table for update\n", 792 | "\n", 793 | "#If this is the first time running the notebook, create the hosted table with the following CSV. \n", 794 | "#https://esriis-my.sharepoint.com/:x:/g/personal/cal10660_esri_com/EeTMhaHSsK9EnjJwYrtnJJ4BNOx-qLU-dcJu_DC5i6rquQ?e=I9ZEDY\n", 795 | "\n", 796 | "#Make sure to set the field types correctly.\n", 797 | "#String: id, title, owner, fullname, url, access, type, snippet, categories, tags, tag_status\n", 798 | "#Integer: numComments, numRatings, numViews, scoreCompleteness\n", 799 | "#Double: size, size_mb\n", 800 | "#Date: created, modified, date_updated\n", 801 | "\n", 802 | "item_id = '4f9ea13534304f799a8378af30b63c72'\n", 803 | "dest_fl = gis.content.get(item_id).tables[0]\n", 804 | "dest_fl" 805 | ] 806 | }, 807 | { 808 | "cell_type": "code", 809 | "execution_count": 17, 810 | "metadata": {}, 811 | "outputs": [ 812 | { 813 | "name": "stdout", 814 | "output_type": "stream", 815 | "text": [ 816 | "success...\n" 817 | ] 818 | } 819 | ], 820 | "source": [ 821 | "#Update the Hosted Table with the item details\n", 822 | "dest_fl.delete_features(where=f'date_updated = \\'{date_today}\\'') #If the notebook is run more than once a day, delete any previous records from the same day. 
\n", 823 | "result = dest_fl.edit_features(adds=fl_lst)\n", 824 | "print('success...')" 825 | ] 826 | }, 827 | { 828 | "cell_type": "markdown", 829 | "metadata": {}, 830 | "source": [ 831 | "# License Administration" 832 | ] 833 | }, 834 | { 835 | "cell_type": "code", 836 | "execution_count": 18, 837 | "metadata": {}, 838 | "outputs": [], 839 | "source": [ 840 | "# licenses = gis.admin.license.all()\n", 841 | "\n", 842 | "# f_license = []\n", 843 | "\n", 844 | "# for license in licenses:\n", 845 | "# f = {\"attributes\":{}}\n", 846 | "# try:\n", 847 | "# report = license.report\n", 848 | "# #print(report)\n", 849 | "# for index, row in report.iterrows():\n", 850 | "# licensename = license.properties.listing.title\n", 851 | "# #print(licensename)\n", 852 | "# #print(report.shape[0])\n", 853 | "# if report.shape[0] > 1:\n", 854 | "# f['attributes']['Entitlement'] = row['Entitlement']\n", 855 | "# f['attributes']['Total'] = row['Total']\n", 856 | "# f['attributes']['Assigned'] = row['Assigned']\n", 857 | "# f['attributes']['Remaining'] = row['Remaining']\n", 858 | "# f_license.append(f)\n", 859 | "# except:\n", 860 | "# continue\n", 861 | " \n", 862 | "# f_license" 863 | ] 864 | }, 865 | { 866 | "cell_type": "markdown", 867 | "metadata": {}, 868 | "source": [ 869 | "# Licensing" 870 | ] 871 | }, 872 | { 873 | "cell_type": "code", 874 | "execution_count": 19, 875 | "metadata": {}, 876 | "outputs": [], 877 | "source": [ 878 | "# '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''\n", 879 | "# Copyright © 2021 Esri\n", 880 | "#\n", 881 | "# All rights reserved under the copyright laws of the United States \n", 882 | "# and applicable international laws, treaties, and conventions.\n", 883 | "# You may freely redistribute and use this sample code, with or \n", 884 | "# without modification, provided you include the original copyright \n", 885 | "# notice and use restrictions.\n", 886 | "#\n", 887 | "# Disclaimer: THE SAMPLE CODE IS PROVIDED \"AS IS\" AND ANY 
EXPRESS \n", 888 | "# OR IMPLIED WARRANTIES, INCLUDING THE IMPLIED WARRANTIES OF \n", 889 | "# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE \n", 890 | "# DISCLAIMED. IN NO EVENT SHALL ESRI OR CONTRIBUTORS BE LIABLE FOR\n", 891 | "# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL \n", 892 | "# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS \n", 893 | "# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) \n", 894 | "# SUSTAINED BY YOU OR A THIRD PARTY, HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \n", 895 | "# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT ARISING IN ANY WAY OUT OF THE USE \n", 896 | "# OF THIS SAMPLE CODE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n", 897 | "#\n", 898 | "# For additional information, contact:\n", 899 | "# Esri\n", 900 | "# Attn: Contracts and Legal Services Department\n", 901 | "# 380 New York Street\n", 902 | "# Redlands, California, 92373-8100\n", 903 | "# USA\n", 904 | "# email: contracts@esri.com\n", 905 | "# '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''" 906 | ] 907 | } 908 | ], 909 | "metadata": { 910 | "kernelspec": { 911 | "display_name": "Python 3", 912 | "language": "python", 913 | "name": "python3" 914 | }, 915 | "language_info": { 916 | "codemirror_mode": { 917 | "name": "ipython", 918 | "version": 3 919 | }, 920 | "file_extension": ".py", 921 | "mimetype": "text/x-python", 922 | "name": "python", 923 | "nbconvert_exporter": "python", 924 | "pygments_lexer": "ipython3", 925 | "version": "3.7.9" 926 | } 927 | }, 928 | "nbformat": 4, 929 | "nbformat_minor": 4 930 | } 931 | -------------------------------------------------------------------------------- /python/api/arcgis-online-admin/README.md: -------------------------------------------------------------------------------- 1 | ## Manage Items and Credit Consumption in ArcGIS Online Organization 2 | 3 | Original Author: Calvin Kwon 4 | 5 | This notebook will help 
you manage AGOL Org. 6 | -------------------------------------------------------------------------------- /python/api/arcgis-online-admin/fl_details.csv: -------------------------------------------------------------------------------- 1 | id,title,owner,fullname,created,modified,url,size,size_mb,credit_cost,access,type,snippet,categories,numComments,numRatings,numViews,scoreCompleteness,tags,tag_status,date_updated,remaining_credits,last_login 2 | -------------------------------------------------------------------------------- /python/api/arcgis-online-admin/item_details.csv: -------------------------------------------------------------------------------- 1 | id,title,owner,fullname,created,modified,url,size,size_mb,access,type,snippet,categories,numComments,numRatings,numViews,scoreCompleteness,tags,tag_status 2 | -------------------------------------------------------------------------------- /python/api/arcgis-online-admin/user_details.csv: -------------------------------------------------------------------------------- 1 | username,id,fullName,availableCredits,assignedCredits,preferredView,email,lastLogin,mfaEnabled,access,orgId,role,user_privileges,userLicenseTypeId,disabled,region,thumbnail,created,modified,groups,provider,item,myEsri 2 | -------------------------------------------------------------------------------- /python/api/change-cache-control-hosted-feature-layer/Change the Cache Control for a Hosted Feature Layer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Change the Cache Control for a Hosted Feature Layer\n", 8 | "This python notebook will show you how to update the Cache Control option for an ArcGIS Online Feature Layer. For more information on this option, see the [help documentation](https://doc.arcgis.com/en/arcgis-online/manage-data/manage-hosted-feature-layers.htm#CDN)." 
9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "#### Run this cell to connect to your GIS and get started:" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "from arcgis.gis import GIS\n", 25 | "from arcgis.features import FeatureLayer\n", 26 | "gis = GIS(\"home\")" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "Get your Hosted Feature Layer Item" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": null, 39 | "metadata": {}, 40 | "outputs": [], 41 | "source": [ 42 | "item = gis.content.get(\"\")\n", 43 | "item" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "Create a FeatureLayer from that item using its URL property" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "fl = FeatureLayer(item.url)\n", 60 | "fl" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "Specify the cache update duration\n", 68 | "\n", 69 | "Possible options are:\n", 70 | "```\n", 71 | "0 (Never)\n", 72 | "30 (30 seconds)\n", 73 | "60 (1 minute)\n", 74 | "120 (2 minutes)\n", 75 | "300 (5 minutes)\n", 76 | "900 (15 minutes)\n", 77 | "1800 (30 minutes)\n", 78 | "3600 (1 hour)\n", 79 | "```" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "cache_update = {\"cacheMaxAge\":3600}\n", 89 | "fl.manager.update_definition(cache_update)" 90 | ] 91 | } 92 | ], 93 | "metadata": { 94 | "esriNotebookRuntime": { 95 | "notebookRuntimeName": "ArcGIS Notebook Python 3 Standard", 96 | "notebookRuntimeVersion": "4.0" 97 | }, 98 | "kernelspec": { 99 | "display_name": "Python 3", 100 | "language": "python", 101 | "name": "python3" 102 | }, 
103 | "language_info": { 104 | "codemirror_mode": { 105 | "name": "ipython", 106 | "version": 3 107 | }, 108 | "file_extension": ".py", 109 | "mimetype": "text/x-python", 110 | "name": "python", 111 | "nbconvert_exporter": "python", 112 | "pygments_lexer": "ipython3", 113 | "version": "3.6.10" 114 | } 115 | }, 116 | "nbformat": 4, 117 | "nbformat_minor": 2 118 | } -------------------------------------------------------------------------------- /python/api/change-cache-control-hosted-feature-layer/README.md: -------------------------------------------------------------------------------- 1 | ## Change the Cache Control for a Hosted Feature Layer 2 | 3 | Original Author: Adam Pfister 4 | 5 | This python notebook will show you how to update the Cache Control option for an ArcGIS Online Feature Layer. For more information on this option, see the [help documentation](https://doc.arcgis.com/en/arcgis-online/manage-data/manage-hosted-feature-layers.htm#CDN). 6 | -------------------------------------------------------------------------------- /python/api/csv-to-hosted-table-service/Publish CSV file as Hosted Feature Layer or Table.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Publishing a CSV file to a FeatureLayer/Table" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "#### Run this cell to connect to your GIS and get started:" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "from arcgis.gis import GIS\n", 24 | "gis = GIS(\"home\")" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": null, 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "csv_dataset = '/arcgis/home/sdg-test.csv'" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | 
"source": [ 40 | "Setup basic Item Properties for the CSV Item" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": null, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "item_properties = {\n", 50 | " 'title': 'csv as a table service',\n", 51 | " 'description': 'a csv file is now a table service hosted in arcgis online',\n", 52 | " 'tags': 'csv, table, service'\n", 53 | "}" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "Add (upload) the CSV file to your Content" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [ 69 | "csv_item = gis.content.add(item_properties=item_properties, data=csv_dataset)\n", 70 | "csv_item" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "Analyze the CSV item to get the publishing parameters used in the final `.publish` call" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "analyze_result = gis.content.analyze(item=csv_item)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "Make list of fields we want to make sure is published as Text/String types" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "to_string_fields = ['Goal']" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "for field in analyze_result['publishParameters']['layerInfo']['fields']:\n", 112 | " if (field['name'] in to_string_fields):\n", 113 | " field['type'] = 'esriFieldTypeString'\n", 114 | " field['sqlType'] = 'sqlTypeNVarchar'\n", 115 | " " 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | 
"source": [ 122 | "Double check to make sure the changes went through" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "analyze_result['publishParameters']['layerInfo']['fields']" 132 | ] 133 | }, 134 | { 135 | "cell_type": "markdown", 136 | "metadata": {}, 137 | "source": [ 138 | "If you want to ensure that the service you publish is a Table Service, set the `locationType` to `'none'`" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": null, 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "analyze_result['publishParameters']['locationType'] = 'none'" 148 | ] 149 | }, 150 | { 151 | "cell_type": "markdown", 152 | "metadata": {}, 153 | "source": [ 154 | "Get a reference to the `publishParameters` that came back from the `.analyze` call" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "metadata": {}, 161 | "outputs": [], 162 | "source": [ 163 | "publish_params = analyze_result['publishParameters']" 164 | ] 165 | }, 166 | { 167 | "cell_type": "markdown", 168 | "metadata": {}, 169 | "source": [ 170 | "Publish the CSV item as a hosted feature layer or table layer" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "metadata": {}, 177 | "outputs": [], 178 | "source": [ 179 | "published_item = csv_item.publish(publish_parameters=publish_params)\n", 180 | "published_item" 181 | ] 182 | } 183 | ], 184 | "metadata": { 185 | "esriNotebookRuntime": { 186 | "notebookRuntimeName": "ArcGIS Notebook Python 3 Standard", 187 | "notebookRuntimeVersion": "4.0" 188 | }, 189 | "kernelspec": { 190 | "display_name": "Python 3", 191 | "language": "python", 192 | "name": "python3" 193 | }, 194 | "language_info": { 195 | "codemirror_mode": { 196 | "name": "ipython", 197 | "version": 3 198 | }, 199 | "file_extension": ".py", 200 | "mimetype": "text/x-python", 201 | 
"name": "python", 202 | "nbconvert_exporter": "python", 203 | "pygments_lexer": "ipython3", 204 | "version": "3.6.10" 205 | } 206 | }, 207 | "nbformat": 4, 208 | "nbformat_minor": 2 209 | } 210 | -------------------------------------------------------------------------------- /python/api/csv-to-hosted-table-service/README.md: -------------------------------------------------------------------------------- 1 | ## Publish CSV file as a Hosted Feature Layer/Table 2 | 3 | Original Author: Adam Pfister 4 | 5 | This python notebook will walk you through the steps of publishing a CSV file as a hosted feature layer or table. Additionally, you can specifiy that certain fields will be published as a string/text type instead of a numeric type. 6 | 7 | ### Customer or Use Case 8 | 9 | Users may not know that the ability exists to publish a dataset as a hosted table service, without geometry, and still use it within the ArcGIS platform. 10 | -------------------------------------------------------------------------------- /python/api/delete-fields-from-hosted-feature-layer/Delete Fields from a Hosted Feature Layer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Delete Fields from a Hosted Feature Layer" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "from arcgis import GIS\n", 17 | "from arcgis.features import FeatureLayer" 18 | ] 19 | }, 20 | { 21 | "cell_type": "markdown", 22 | "metadata": {}, 23 | "source": [ 24 | "#### Connect to your ArcGIS Online Organization" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": null, 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "gis = GIS('home')" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "#### Get the Item reference for 
your Feature Layer" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": null, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "item = gis.content.get('5272ba7d4ac84a95982305be78391d8d')\n", 50 | "item" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": {}, 56 | "source": [ 57 | "#### Get the Feature Layer using the URL\n", 58 | "In this example, we only have one layer in our Feature Layer so we can safely use the `item.layers[0].url` line" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": null, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "fl = FeatureLayer(item.layers[0].url)\n", 68 | "fl" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "metadata": {}, 74 | "source": [ 75 | "#### Create a JSON Object to define which fields we want to delete" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "fields_to_delete = {\n", 85 | " \"fields\": [\n", 86 | " { \"name\" : \"CreationDate_1\" },\n", 87 | " { \"name\" : \"Creator_1\" },\n", 88 | " { \"name\" : \"EditDate_1\" },\n", 89 | " { \"name\" : \"Editor_1\" }\n", 90 | " ]\n", 91 | "}" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {}, 97 | "source": [ 98 | "#### Call the `delete_from_definition` method to remove the fields from the Feature Layer" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": {}, 105 | "outputs": [], 106 | "source": [ 107 | "res = fl.manager.delete_from_definition(fields_to_delete)\n", 108 | "res" 109 | ] 110 | } 111 | ], 112 | "metadata": { 113 | "kernelspec": { 114 | "display_name": "Python 3", 115 | "language": "python", 116 | "name": "python3" 117 | }, 118 | "language_info": { 119 | "codemirror_mode": { 120 | "name": "ipython", 121 | "version": 3 122 | }, 123 | "file_extension": ".py", 124 | "mimetype": "text/x-python", 125 | "name": 
"python", 126 | "nbconvert_exporter": "python", 127 | "pygments_lexer": "ipython3", 128 | "version": "3.7.3" 129 | } 130 | }, 131 | "nbformat": 4, 132 | "nbformat_minor": 4 133 | } 134 | -------------------------------------------------------------------------------- /python/api/delete-fields-from-hosted-feature-layer/README.md: -------------------------------------------------------------------------------- 1 | ## Delete Fields from a Hosted Feature Layer 2 | 3 | Original Author: Adam Pfister 4 | 5 | This python notebook will walk you through the steps of deleting fields from an ArcGIS Online Feature Layer 6 | -------------------------------------------------------------------------------- /python/api/get-portal-usage-by-user/Get Portal Usage by User.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Get Portal Usage by User\n" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "#### Run this cell to connect to your GIS and get started:" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import requests\n", 24 | "import io\n", 25 | "from arcgis.gis import GIS\n", 26 | "import pandas as pd\n", 27 | "gis = GIS(\"home\")" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "#### Get portal properties to use in the request" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 7, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "portal_id = gis.properties.id\n", 44 | "portal_key = gis.properties.urlKey.lower()\n", 45 | "token = gis._portal.con.token" 46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "metadata": {}, 51 | "source": [ 52 | "#### Make the request to the Portal REST API" 53 | ] 54 | }, 55 | { 56 | "cell_type": 
"code", 57 | "execution_count": null, 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "url = f'https://{portal_key}.maps.arcgis.com/sharing/rest/content/portals/{portal_id}'\n", 62 | "\n", 63 | "params = {\n", 64 | " 'f': 'csv',\n", 65 | " 'sortField': 'size',\n", 66 | " 'sortOrder': 'desc',\n", 67 | " 'types': 'Feature Service',\n", 68 | " 'reservedTypeKeyword': 'Hosted Service',\n", 69 | " 'token': token\n", 70 | "}\n", 71 | "csv_out = requests.get(url, params=params)\n", 72 | "csv_out" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "#### Pipe the results into a Pandas Dataframe and return the first 10 records to preview" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "df = pd.read_csv(io.StringIO(csv_out.text))\n", 89 | "df.head(10)" 90 | ] 91 | } 92 | ], 93 | "metadata": { 94 | "esriNotebookRuntime": { 95 | "notebookRuntimeName": "ArcGIS Notebook Python 3 Advanced", 96 | "notebookRuntimeVersion": "3.0" 97 | }, 98 | "kernelspec": { 99 | "display_name": "Python 3", 100 | "language": "python", 101 | "name": "python3" 102 | }, 103 | "language_info": { 104 | "codemirror_mode": { 105 | "name": "ipython", 106 | "version": 3 107 | }, 108 | "file_extension": ".py", 109 | "mimetype": "text/x-python", 110 | "name": "python", 111 | "nbconvert_exporter": "python", 112 | "pygments_lexer": "ipython3", 113 | "version": "3.6.9" 114 | } 115 | }, 116 | "nbformat": 4, 117 | "nbformat_minor": 2 118 | } 119 | -------------------------------------------------------------------------------- /python/api/get-portal-usage-by-user/README.md: -------------------------------------------------------------------------------- 1 | ## Get Portal Usage by User 2 | 3 | Original Author: Adam Pfister 4 | 5 | This python notebook will walk you through the steps of querying the Portal REST API to return usage statistics by named user. 
 6 | -------------------------------------------------------------------------------- /python/api/ocr-scrape-coordinates-from-image-pixels/README.md: -------------------------------------------------------------------------------- 1 | ## Scrape Coordinates from Image Pixels (OCR) 2 | 3 | Original Author: Matthew Berra 4 | 5 | This python notebook uses Optical Character Recognition to scrape coordinates off the pixels of an image (jpg, gif, etc.). It requires the installation of Tesseract on the machine that is running it. 6 | 7 | Power_Plants.zip contains a set of imagery products that can be used for demonstrations. 8 | -------------------------------------------------------------------------------- /python/api/ocr-scrape-coordinates-from-image-pixels/ocr-scrpae-coordinates-from-image-pixels.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import os\n", 10 | "import datetime\n", 11 | "import pytesseract\n", 12 | "from PIL import Image" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": null, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "images_path = \"C:\\\\1_projects\\\\138_fedgis2021\\\\OCR\\\\images\\\\\"\n", 22 | "pytesseract.pytesseract.tesseract_cmd = r'C:\\Program Files\\Tesseract-OCR\\tesseract.exe'" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "arcpy.env.workspace = images_path\n", 32 | "current_time = datetime.datetime.now().strftime(\"%B_%d_%Y_%I_%M_%S%p\")\n", 33 | "arcpy.CreateFileGDB_management(images_path, current_time + '.gdb')\n", 34 | "SR = arcpy.SpatialReference(4326)\n", 35 | "new_point = arcpy.CreateFeatureclass_management(images_path + current_time + '.gdb', \"Images\", 'POINT', spatial_reference=SR)\n", 36 | "fc = new_point[0]\n", 37 
| "arcpy.AddField_management(fc, \"Name\", \"TEXT\", \"\", \"\", 100, \"Name\")\n", 38 | "arcpy.AddField_management(fc, \"City\", \"TEXT\", \"\", \"\", 100, \"City\")\n", 39 | "arcpy.AddField_management(fc, \"File\", \"TEXT\", \"\", \"\", 100, \"File\")\n", 40 | "\n", 41 | "for filename in os.listdir(images_path):\n", 42 | " if filename.endswith(\".JPG\") or filename.endswith(\".PNG\") or filename.endswith(\".jpg\") or filename.endswith(\".png\"):\n", 43 | " text = pytesseract.image_to_string(Image.open(images_path + filename))\n", 44 | " split_all = text.split(\"\\n\")\n", 45 | " name = split_all[0]\n", 46 | " city = split_all[1]\n", 47 | " coords = split_all[2].split(\" Lng: \")\n", 48 | " lat = coords[0].replace(\"Lat: \", \"\")\n", 49 | " lng = coords[1]\n", 50 | " with arcpy.da.InsertCursor(fc, ['SHAPE@', 'Name', \"City\", \"File\"]) as cursor:\n", 51 | " coordinates = arcpy.Point(lng,lat)\n", 52 | " cursor.insertRow((coordinates, name, city, filename))\n", 53 | " print(name + \" added.\")\n", 54 | " \n", 55 | "print(\"\\nExport complete.\")" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [] 64 | } 65 | ], 66 | "metadata": { 67 | "kernelspec": { 68 | "display_name": "ArcGISPro", 69 | "language": "Python", 70 | "name": "python3" 71 | }, 72 | "language_info": { 73 | "file_extension": ".py", 74 | "name": "python", 75 | "version": "3" 76 | } 77 | }, 78 | "nbformat": 4, 79 | "nbformat_minor": 2 80 | } 81 | -------------------------------------------------------------------------------- /python/api/plot-images-from-exif-label-from-wikipedia/README.md: -------------------------------------------------------------------------------- 1 | ## Plot Images from EXIF and Label with Wikipedia Data 2 | 3 | Original Author: Matthew Berra 4 | 5 | This python notebook walks you through the process of naming and plotting a folder of images in a geodatabase. 
It was designed to be used inside of ArcGIS Pro. The script pulls coordinates from each image's exif data and names each image based on the closest location that it can find in wikipedia. It also provides a link to the title's wikipedia page. 6 | 7 | The Rome.zip file contains sample data that can be used with the code. 8 | -------------------------------------------------------------------------------- /python/api/plot-images-from-exif-label-from-wikipedia/Rome.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/python/api/plot-images-from-exif-label-from-wikipedia/Rome.zip -------------------------------------------------------------------------------- /python/api/plot-images-from-exif-label-from-wikipedia/plot-images-from-exif-and-label-with-wikipedia.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Import python packages. Some may need to be installed using the Python Package Manager.\n", 10 | "\n", 11 | "import os\n", 12 | "import datetime\n", 13 | "import exifread\n", 14 | "from PIL import Image\n", 15 | "import wikipedia" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "#This is the only variable that needs to be set. 
It is the path to the folder of images.\n", 25 | "\n", 26 | "path = \"C:\\\\1_projects\\\\138_fedgis2021\\\\images\\\\\"" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": null, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "#This function converts the coordinates contained in the exif to decimal degrees.\n", 36 | "\n", 37 | "def _convert_to_degress(value):\n", 38 | " d = float(value.values[0].num) / float(value.values[0].den)\n", 39 | " m = float(value.values[1].num) / float(value.values[1].den)\n", 40 | " s = float(value.values[2].num) / float(value.values[2].den)\n", 41 | "\n", 42 | " return d + (m / 60.0) + (s / 3600.0)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "#This code reads the exif data, scrapes wikipedia, and plots each location in a geodatabase.\n", 52 | "\n", 53 | "arcpy.env.workspace = path\n", 54 | "current_time = datetime.datetime.now().strftime(\"%B_%d_%Y_%I_%M_%S%p\")\n", 55 | "arcpy.CreateFileGDB_management(path, current_time + '.gdb')\n", 56 | "SR = arcpy.SpatialReference(4326)\n", 57 | "new_point = arcpy.CreateFeatureclass_management(path + current_time + '.gdb', \"Pictures\", 'POINT', spatial_reference=SR)\n", 58 | "fc = new_point[0]\n", 59 | "arcpy.AddField_management(fc, \"Name\", \"TEXT\", \"\", \"\", 100, \"Name\")\n", 60 | "arcpy.AddField_management(fc, \"Image_Link\", \"TEXT\", \"\", \"\", 100, \"Image_Link\")\n", 61 | "arcpy.AddField_management(fc, \"Wiki_Link\", \"TEXT\", \"\", \"\", 200, \"Wiki_link\")\n", 62 | " \n", 63 | "\n", 64 | "for filename in os.listdir(path):\n", 65 | " if filename.endswith(\".JPG\") or filename.endswith(\".PNG\") or filename.endswith(\".jpg\") or filename.endswith(\".png\"):\n", 66 | " im = Image.open(os.path.join(path, filename))\n", 67 | " tags = {}\n", 68 | " with open(os.path.join(path, filename), 'rb') as f:\n", 69 | " tags = exifread.process_file(f, 
details=False)\n", 70 | " if \"GPS GPSLatitude\" in tags.keys():\n", 71 | " lat = _convert_to_degress(tags[\"GPS GPSLatitude\"])\n", 72 | " latRef = tags[\"GPS GPSLatitudeRef\"]\n", 73 | " lngRef = tags[\"GPS GPSLongitudeRef\"]\n", 74 | " if str(latRef) == 'S':\n", 75 | " lat = -lat\n", 76 | " lng = _convert_to_degress(tags[\"GPS GPSLongitude\"])\n", 77 | " if str(lngRef) == 'W':\n", 78 | " lng = -lng\n", 79 | " name_search = wikipedia.geosearch(lat, lng, results=1, radius=10000)\n", 80 | " name = name_search[0]\n", 81 | " wiki = \"https://en.wikipedia.org/wiki/\" + name.replace(\" \", \"_\")\n", 82 | " with arcpy.da.InsertCursor(fc, ['SHAPE@', 'Name', \"Image_Link\", \"Wiki_link\"]) as cursor:\n", 83 | " coordinates = arcpy.Point(lng,lat)\n", 84 | " cursor.insertRow((coordinates, str(name).replace(\",\",\"\"), filename, wiki))\n", 85 | " \n", 86 | "print(\"Export complete.\")" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [] 95 | } 96 | ], 97 | "metadata": { 98 | "kernelspec": { 99 | "display_name": "ArcGISPro", 100 | "language": "Python", 101 | "name": "python3" 102 | }, 103 | "language_info": { 104 | "file_extension": ".py", 105 | "name": "python", 106 | "version": "3" 107 | } 108 | }, 109 | "nbformat": 4, 110 | "nbformat_minor": 2 111 | } 112 | -------------------------------------------------------------------------------- /python/api/predict-satellite-imaging-windows/README.md: -------------------------------------------------------------------------------- 1 | ## Predict Satellite Image Windows 2 | 3 | Original Author: Matthew Berra 4 | 5 | This python notebook walks you through the process of calculating satellite imaging windows by using the skyfield python package. It was designed to run inside of ArcGIS Pro and requires installing skyfield through the Python Package Manager. 
6 | -------------------------------------------------------------------------------- /python/api/publish-hosted-feature-layer-not-editable/Publish a Hosted Feature Layer without Editing enabled.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Publish a Hosted Feature Layer without Editing enabled\n", 8 | "This python notebook will show you how to publish an ArcGIS Online Feature Layer without the `Edit` capability, removing the option for users to create and/or add new features.\n", 9 | "\n", 10 | "In this example, we will use a CSV file as our source to publish from" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "#### Run this cell to connect to your GIS and get started:" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "from arcgis.gis import GIS\n", 27 | "from arcgis.features import FeatureLayer\n", 28 | "gis = GIS(\"home\")" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "metadata": {}, 34 | "source": [ 35 | "Get your Hosted Feature Layer Item" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "csv_item = gis.content.get(\"\")\n", 45 | "csv_item" 46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "metadata": {}, 51 | "source": [ 52 | "Analyze the CSV file and inspect the default capabilities" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": null, 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "analyze_result = gis.content.analyze(item=csv_item)\n", 62 | "publish_params = analyze_result['publishParameters']\n", 63 | "publish_params['layerInfo']['capabilities']" 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | 
"source": [ 70 | "Change the default capabilities to only be `Query` and then make sure the changes are applied" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": null, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "publish_params['layerInfo']['capabilities'] = 'Query'\n", 80 | "publish_params['layerInfo']['capabilities']" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "metadata": {}, 86 | "source": [ 87 | "Publish the Item without Editing enabled" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": null, 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [ 96 | "csv_item.publish(publish_parameters=publish_params)\n", 97 | "csv_item" 98 | ] 99 | } 100 | ], 101 | "metadata": { 102 | "esriNotebookRuntime": { 103 | "notebookRuntimeName": "ArcGIS Notebook Python 3 Standard", 104 | "notebookRuntimeVersion": "4.0" 105 | }, 106 | "kernelspec": { 107 | "display_name": "Python 3", 108 | "language": "python", 109 | "name": "python3" 110 | }, 111 | "language_info": { 112 | "codemirror_mode": { 113 | "name": "ipython", 114 | "version": 3 115 | }, 116 | "file_extension": ".py", 117 | "mimetype": "text/x-python", 118 | "name": "python", 119 | "nbconvert_exporter": "python", 120 | "pygments_lexer": "ipython3", 121 | "version": "3.6.10" 122 | } 123 | }, 124 | "nbformat": 4, 125 | "nbformat_minor": 2 126 | } -------------------------------------------------------------------------------- /python/api/publish-hosted-feature-layer-not-editable/README.md: -------------------------------------------------------------------------------- 1 | ## Publish a Hosted Feature Layer without Editing enabled 2 | 3 | Original Author: Adam Pfister 4 | 5 | This python notebook will show you how to publish an ArcGIS Online Feature Layer without the `Edit` capability, removing the option for users to create and/or add new features. 
6 | 7 | In this example, we will use a CSV file as our source to publish from 8 | -------------------------------------------------------------------------------- /python/api/scrape-wikipedia-for-external-coordinates/README.md: -------------------------------------------------------------------------------- 1 | ## Scrape Wikipedia page for External Coordinates 2 | 3 | Original Author: Matthew Berra 4 | 5 | This python notebook walks you through the process of scraping coordinates from Wikipedia pages. It iterates through every internal link on the page and searches each one for coordinates. This script was designed to be used in ArcGIS Pro. 6 | -------------------------------------------------------------------------------- /python/api/scrape-wikipedia-for-external-coordinates/scrape-wikipedia-page-for-external-coordinates.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 9, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Import the Wikipedia Python Package. It may need to be installed using the Python Package Manager.\n", 10 | "\n", 11 | "import wikipedia" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 10, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "#This is the only variable you need to change. 
It must match the name of a wikipedia article (not a redirect).\n", 21 | "\n", 22 | "topic = \"National Geospatial-Intelligence Agency\"" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 11, 28 | "metadata": {}, 29 | "outputs": [ 30 | { 31 | "name": "stdout", 32 | "output_type": "stream", 33 | "text": [ 34 | "Location, lat, long\n", 35 | "Agencja Wywiadu, 52.19916, 20.99382\n", 36 | "Alternate National Military Command Center, 39.73389, -77.41944\n", 37 | "Antarctica, -90.00000, 0.00000\n", 38 | "Australian Secret Intelligence Service, -35.30986, 149.13057\n", 39 | "Australian Security Intelligence Organisation, -35.29267, 149.14447\n", 40 | "Australian Signals Directorate, -35.29520, 149.14870\n", 41 | "BNF (identifier), 48.83361, 2.37583\n", 42 | "Bay St. Louis, Mississippi, 30.31472, -89.34417\n", 43 | "Belgrade, 44.81667, 20.46667\n", 44 | "Bethesda, Maryland, 38.98472, -77.11306\n", 45 | "Bolivarian Intelligence Service, 10.49540, -66.88260\n", 46 | "Brazilian Intelligence Agency, -15.82100, -47.94000\n", 47 | "Brookmont, Maryland, 38.95583, -77.12722\n", 48 | "CIA University, 38.91780, -77.42720\n", 49 | "Canadian Security Intelligence Service, 45.43740, -75.61400\n", 50 | "Centro de Investigación y Seguridad Nacional (México), 19.30400, -99.23600\n", 51 | "Communications Security Establishment, 45.43400, -75.61600\n", 52 | "Cuba, 40.00000, -100.00000\n", 53 | "Dalecarlia Reservoir, 38.94251, -77.11004\n", 54 | "Danish Defence Intelligence Service, 55.69111, 12.59389\n", 55 | "Defense Contract Audit Agency, 38.71974, -77.15458\n", 56 | "Defense Mapping Agency, 38.75320, -77.19690\n", 57 | "Defense Threat Reduction Agency, 38.71700, -77.16200\n", 58 | "Dirección de Inteligencia, 23.13840, -82.39860\n", 59 | "Direction générale de la surveillance du territoire, 33.93503, -6.86353\n", 60 | "Directorate-General for External Security, 48.87440, 2.40670\n", 61 | "Elektronik ve Teknik İstihbarat Başkanlığı, 39.94190, 32.85433\n", 62 | 
"Estonian Internal Security Service, 59.43114, 24.73793\n", 63 | "Falls Church, Virginia, 38.88583, -77.17222\n", 64 | "Federal Bureau of Investigation, 38.89520, -77.02510\n", 65 | "Federal Intelligence Agency, -34.60700, -58.37060\n", 66 | "Federal Intelligence Service (Germany), 48.06400, 11.53500\n", 67 | "Federal Office for the Protection of the Constitution, 51.01944, 6.89139\n", 68 | "Finnish Security Intelligence Service, 60.16190, 24.94090\n", 69 | "Foreign Intelligence Service (Russia), 55.58400, 37.51700\n", 70 | "Fort Belvoir, 38.71974, -77.15458\n", 71 | "Fort Belvoir, VA, 38.71974, -77.15458\n", 72 | "Garda Crime and Security Branch, 53.35372, -6.29847\n", 73 | "General Security Directorate (Iraq), 33.33000, 44.48000\n", 74 | "General Security Directorate (Syria), 33.52028, 36.31167\n", 75 | "Government Communications Headquarters, 51.89944, -2.12444\n", 76 | "Government Communications Security Bureau, -41.27566, 174.78117\n", 77 | "Indonesian State Intelligence Agency, -6.26872, 106.85135\n", 78 | "Intelligence Community Campus-Bethesda, 38.95000, -77.12250\n", 79 | "Intelligence Division (Finland), 60.16418, 24.94967\n", 80 | "Internal Security Department (Singapore), 1.32349, 103.84409\n", 81 | "Internal Security Organisation, 0.32194, 32.58250\n", 82 | "Microsoft, 47.63972, -122.12833\n", 83 | "Military Intelligence Directorate (Syria), 33.51405, 36.27849\n", 84 | "Ministry of Intelligence (Iran), 35.75100, 51.45700\n", 85 | "Ministry of State Security (China), 39.99210, 116.27830\n", 86 | "NATO bombing of the Chinese embassy in Belgrade, 44.82500, 20.41900\n", 87 | "NLI (identifier), 31.77617, 35.19681\n", 88 | "National Archives and Records Administration, 38.89267, -77.02294\n", 89 | "National Counterproliferation Center, 38.93300, -77.20500\n", 90 | "National Counterterrorism Center, 38.93300, -77.20500\n", 91 | "National Defense University, 38.86600, -77.01500\n", 92 | "National Intelligence Organization (Turkey), 39.94190, 32.85433\n", 93 | 
"National Intelligence University, 38.84240, -77.01620\n", 94 | "National Military Command Center, 38.87111, -77.05556\n", 95 | "National Photographic Interpretation Center, 38.75320, -77.19690\n", 96 | "National Reconnaissance Office, 38.88194, -77.45028\n", 97 | "National Security Agency, 39.10889, -76.77139\n", 98 | "National Security Agency (Chad), 39.10889, -76.77139\n", 99 | "Nationale SIGINT Organisatie, 53.28333, 6.21389\n", 100 | "Open Source Center, 38.95520, -77.36020\n", 101 | "Osama bin Laden's compound in Abbottabad, 34.16931, 73.24244\n", 102 | "Pakistan, 30.00000, 70.00000\n", 103 | "Palace Office (Oman), 23.58917, 58.47806\n", 104 | "Pokhran-II, 27.07884, 71.72211\n", 105 | "Pretoria, -25.74611, 28.18806\n", 106 | "Reference Elevation Model of Antarctica, -90.00000, 0.00000\n", 107 | "Research Department Unit, 45.60833, 126.63194\n", 108 | "Reston, VA, 38.95444, -77.34639\n", 109 | "Ronald Reagan Building and International Trade Center, 38.89389, -77.03083\n", 110 | "Secret Intelligence Service, 51.48722, -0.12417\n", 111 | "Sinyal İstihbaratı Başkanlığı, 39.94190, 32.85433\n", 112 | "Special Collection Service, 39.04500, -76.85700\n", 113 | "Special Detective Unit, 53.33431, -6.26397\n", 114 | "Springfield, Virginia, 38.78861, -77.17944\n", 115 | "St. Louis, Missouri, 38.62722, -90.19778\n", 116 | "St. 
"#This code iterates through the internal links on the wikipedia page and searches for pages that contain geospatial coordinates.\n",
This notebook will update the `viewLayerDefinition` of a hosted feature layer view to only show features that intersect a polygon. In our example below, we are querying a layer of generalized U.S. States for the boundary of Indiana, then applying that polygon as the spatial filter for another layer.
Consider your polygons and the level of detail they have before implementing this approach.**"},{"metadata":{"trusted":true},"cell_type":"code","source":"from arcgis.gis import GIS\nfrom arcgis.features import FeatureLayer\nfrom arcgis.geometry import Geometry, SpatialReference\n\nfrom copy import deepcopy\ngis = GIS(\"home\")","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Some basic setup of what state boundary we want to grab for our filter"},{"metadata":{"trusted":true},"cell_type":"code","source":"state_abbr_to_use = 'IN'\nstate_abbr_field = 'STATE_ABBR'\nwhere_clause = f\"{state_abbr_field} = '{state_abbr_to_use}'\"\nwhere_clause","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Get a reference to the State boundary layer and the hosted feature layer view we are going to update"},{"metadata":{"trusted":true},"cell_type":"code","source":"fl = FeatureLayer.fromitem(gis.content.get('99fd67933e754a1181cc755146be21ca'))\nflview_to_update = FeatureLayer.fromitem(gis.content.get('400ab9c2d7024058b5c6c2f38a714fd3'))","execution_count":null,"outputs":[]},{"metadata":{},"cell_type":"markdown","source":"Here is our template JSON we will use. 
"source":"Execute our query for Indiana's geometry"
Converts a pd.DataFrame into a table hosted in a file geodatabase within ArcGIS 16 | 17 | :param DataFrame or TextFileReader Df: df or object containing df chunks 18 | :param str outTableName: the name of the output table 19 |
:raises pd.errors.EmptyDataError: if the dataframe is empty
Type "{type(Df)}" is not a DataFrame or TextFileReader') 69 | 70 | return True 71 | 72 | def createTableAndAddFields(self) -> dict: 73 | #converting any non-arcpy-safe column names and datatypes into arcpy-safe ones 74 | arcpySafeColsAndTypes, colsAfterAndBefore = self._getArcpyTypesAndConversionFromDf(self.colsAndTypes) 75 | 76 | #populating the field description with the names, dtypes, and aliases (from the dataframe) 77 | fieldDescription = [[name, value, colsAfterAndBefore[name]] for name, value in arcpySafeColsAndTypes.items()] 78 | 79 | #creating the table and adding the correct fields and datatypes 80 | arcpy.management.CreateTable(self.outGDB, self.outTableName) 81 | self._logger.info(f'Created table: {self.outTableName}') 82 | arcpy.management.AddFields(self.outTableName, fieldDescription) 83 | self._logger.info(f'Added fields to table: {self.outTableName}') 84 | 85 | return colsAfterAndBefore 86 | 87 | @staticmethod 88 | def _getArcpyTypesAndConversionFromDf(colsAndTypesDict:dict): 89 | 90 | if len(colsAndTypesDict) < 1: 91 | raise EmptyDataError('No columns or types in this DataFrame') 92 | 93 | textTypeList = ['object'] 94 | intTypeList = ['int64'] 95 | floatTypeList = ['float64'] 96 | 97 | arcpySafeColsAndTypes = {} 98 | colsAfterAndBefore = {} 99 | for key, value in colsAndTypesDict.items(): 100 | 101 | #converting pandas datatype to arcpy datatype 102 | if value in textTypeList: 103 | colType = 'TEXT' 104 | elif value in intTypeList: 105 | colType = 'LONG' 106 | elif value in floatTypeList: 107 | colType = 'DOUBLE' 108 | else: 109 | colType = 'TEXT' 110 | 111 | #renaming the key so that there are no spaces in it 112 | key = key.strip() 113 | # print(f'Stripped: {key}') 114 | # underscoredName = re.sub(r'[:,]', '', key) 115 | underscoredName = re.sub(r'[\s/\-:,\(\)\.]', '_', key) 116 | # print(f'First Pass: {underscoredName}') 117 | #replacing a group of underscores with only one 118 | underscoredName = re.sub(r'_+', '_', underscoredName) 119 | # 
print(f'Second Pass: {underscoredName}') 120 | #removing trailing underscores from columns 121 | if underscoredName.endswith('_'): 122 | underscoredName = underscoredName[:-1] 123 | 124 | if len(underscoredName) > 64: 125 | underscoredName = underscoredName[:64] 126 | 127 | # underscoredName = re.sub(r'_') 128 | #adding arcpy safe names and types to another dict 129 | arcpySafeColsAndTypes.update({underscoredName: colType}) 130 | 131 | colsAfterAndBefore.update({underscoredName: key}) 132 | 133 | return arcpySafeColsAndTypes, colsAfterAndBefore 134 | 135 | def populateTable(self, colsAfterAndBefore:dict): 136 | insertFields = list(colsAfterAndBefore.keys()) 137 | insertCursor = arcpy.da.InsertCursor(self.outTableName, insertFields) 138 | 139 | if self._isIterable: 140 | for chunk in self.Df: 141 | start = time.time() 142 | self._insertRows(chunk, insertFields, colsAfterAndBefore, insertCursor) 143 | end = time.time() 144 | self._logger.info(f'Chunk inserted in {end-start: .2f} seconds') 145 | elif not self._isIterable: 146 | start = time.time() 147 | self._insertRows(self.Df, insertFields, colsAfterAndBefore, insertCursor) 148 | end = time.time() 149 | self._logger.info(f'DataFrame inserted in {end-start: .2f} seconds') 150 | 151 | def _insertRows(self, Df:pd.DataFrame, insertFields:List[str], colsAfterAndBefore:dict, insertCursor:arcpy.da.InsertCursor): 152 | for index, row in Df.iterrows(): 153 | inputRow = [row[colsAfterAndBefore[name]] for name in insertFields] 154 | insertCursor.insertRow(inputRow) 155 | 156 | def main(self): 157 | #creating ArcGIS table from the df names and dtypes 158 | self.colsAfterAndBefore = self.createTableAndAddFields() 159 | self.populateTable(self.colsAfterAndBefore) 160 | 161 | 162 | 163 | -------------------------------------------------------------------------------- /python/arcpy/dataFrameToTable/test_dataFrameToTable.py: -------------------------------------------------------------------------------- 1 | from pandas.core.frame 
import DataFrame 2 | from dataFrameToTable import DataFrameToTable 3 | import pytest 4 | import pandas as pd 5 | import os 6 | import arcpy 7 | import logging 8 | 9 | arcpy.env.overwriteOutput = True 10 | 11 | @pytest.fixture 12 | def emptyDataFrame(): 13 | return pd.DataFrame() 14 | 15 | @pytest.fixture 16 | def basicDataFrame(): 17 | df = pd.DataFrame([['djfasdjf', 2, 1.5754], ['dfjaksldjf', 4, 5.353], ['afsdjf', 6, 5.357], ['dlfdsjkafddj', 8, 5.28], ['dfaskldja', 10, 3.5674]], columns=['string', 'int', 'float/double']) 18 | return df 19 | 20 | @pytest.fixture 21 | def basicTextFileReader(tmpdir, basicDataFrame): 22 | fullPath = os.path.join(tmpdir, 'example.csv') 23 | basicDataFrame.to_csv(fullPath) 24 | reader = pd.read_csv(fullPath, chunksize=2) 25 | return reader 26 | 27 | @pytest.fixture 28 | def testFileGDB(tmpdir): 29 | filename = 'test.gdb' 30 | arcpy.management.CreateFileGDB(str(tmpdir), filename) 31 | fullPath = os.path.join(str(tmpdir), filename) 32 | return fullPath 33 | 34 | class TestDataFrameToTable: 35 | 36 | def test_emptyDataFrame(self, emptyDataFrame): 37 | with pytest.raises(pd.errors.EmptyDataError): 38 | DataFrameToTable(emptyDataFrame, 'test', '') 39 | 40 | def test_recognizedDataFrame(self, basicDataFrame): 41 | obj = DataFrameToTable(basicDataFrame, 'test', '') 42 | assert obj._isIterable == False 43 | 44 | def test_recognizeTextFileReader(self, basicTextFileReader): 45 | obj = DataFrameToTable(basicTextFileReader, 'test', '') 46 | assert obj._isIterable == True 47 | 48 | def test_noOutputTableName(self, basicDataFrame): 49 | with pytest.raises(ValueError): 50 | DataFrameToTable(basicDataFrame, '', '') 51 | 52 | @pytest.mark.parametrize('inputSource', [ 53 | 'jdklafjd;skajs', 54 | lambda: 542, 55 | (1, 2, 3, 4), 56 | {'key': 'value'} 57 | ]) 58 | def test_incorrectInputDataType(self, inputSource): 59 | with pytest.raises(TypeError): 60 | DataFrameToTable(inputSource, 'test', '') 61 | 62 | @pytest.mark.parametrize('paramsAndOutcome', [ 63 
({'CAPITAL12': 'object'}, {'CAPITAL12': 'TEXT'}),
(specifically a pandas.DataFrame or pandas.io.parsers.TextFileReader for when a csv is streamed in using chunks)
raise ValueError('if_exists must be set to "fail", "replace", or "append"')
pd.io.parsers.TextFileReader: 92 | self._logger.info('Using large dataframe') 93 | for chunk in df: 94 | self.colsAndTypes = {name: chunk.dtypes[name] for name in list(chunk.columns)} 95 | 96 | if chunk.empty: 97 | self._logger.error('Empty dataframe') 98 | raise EmptyDataError('DataFrame is empty') 99 | 100 | break 101 | self._isIterable = True 102 | 103 | else: 104 | raise TypeError(f'Invalid df type. Type "{type(df)}" is not a DataFrame or TextFileReader') 105 | 106 | return True 107 | 108 | def _validateDbParameters(self, driver, username, password, address, port, dbName, query): 109 | """ 110 | Validates database parameters by passing it into create_engine. If it succeeds, the parameters are valid 111 | """ 112 | try: 113 | # if driver: 114 | # driver = '+' + driver 115 | # if port: 116 | # port = ':' + str(port) 117 | # if password: 118 | # password = ':' + password 119 | # if address: 120 | # address = '@' + address 121 | 122 | dbUrl = sqlalchemy.engine.URL.create(drivername=driver, 123 | username=username, 124 | password=password, 125 | host=address, 126 | port=port, 127 | database=dbName, 128 | query=query) 129 | 130 | self._engine = sqlalchemy.create_engine(dbUrl, echo=self._dbEcho) 131 | except Exception as e: 132 | self._logger.exception(e) 133 | raise e 134 | else: 135 | return True 136 | 137 | def insertData(self): 138 | """ 139 | Inserts data into the database depending on the type of DataFrame given 140 | """ 141 | if self._isIterable: 142 | #boolean tracking if function DataFrame.to_sql has been run for any chunk 143 | updated = False 144 | for chunk in self._df: 145 | start = time.time() 146 | if not updated: 147 | chunk.to_sql(name=self._dbTableName, 148 | con=self._engine, 149 | if_exists=self._if_exists, 150 | index=self._index, 151 | index_label=self._index_label, 152 | chunksize=self._chunksize, 153 | dtype=self._dtype) 154 | updated = True 155 | 156 | elif updated: 157 | chunk.to_sql(name=self._dbTableName, 158 | con=self._engine, 159 | 
if_exists='append', 160 | index=self._index, 161 | index_label=self._index_label, 162 | chunksize=self._chunksize, 163 | dtype=self._dtype) 164 | end = time.time() 165 | self._logger.info(f'Chunk inserted in {end-start:.3f} seconds') 166 | 167 | 168 | 169 | elif not self._isIterable: 170 | start = time.time() 171 | self._df.to_sql(name=self._dbTableName, 172 | con=self._engine, 173 | if_exists=self._if_exists, 174 | index=self._index, 175 | index_label=self._index_label, 176 | chunksize=self._chunksize, 177 | dtype=self._dtype) 178 | end = time.time() 179 | self._logger.info(f'DataFrame inserted in {end-start:.3f} seconds') 180 | 181 | def main(self): 182 | self._logger.info('Inserting data...') 183 | self.insertData() 184 | 185 | 186 | 187 | -------------------------------------------------------------------------------- /python/misc/dataFrameToDatabase/test_dataFrameToDatabase.py: -------------------------------------------------------------------------------- 1 | from dataFrameToDatabase import DataFrameToDatabase 2 | import pytest 3 | 4 | #not implemented yet 5 | class TestDataFrameToDatabase: 6 | pass 7 | 8 | 9 | -------------------------------------------------------------------------------- /python/misc/fileDownloader/README.md: -------------------------------------------------------------------------------- 1 | ## FileDownloader (in `fileDownloader.py`) 2 | 3 | Author: Jake Lucas 4 | 5 | This python class will download a file from a website link (e.g. 
www.example.com/file.csv) to whatever path is specified to the class 6 | 7 | ### Requirements: 8 | requests -------------------------------------------------------------------------------- /python/misc/fileDownloader/customErrors.py: -------------------------------------------------------------------------------- 1 | class DirectoryNotFoundError(FileNotFoundError): 2 | pass 3 | 4 | class AmbiguousSelectionException(Exception): 5 | pass 6 | 7 | class NoSelectionException(Exception): 8 | pass -------------------------------------------------------------------------------- /python/misc/fileDownloader/downloadManualTest.py: -------------------------------------------------------------------------------- 1 | from fileDownloader import FileDownloader 2 | from zipfile import ZipFile 3 | 4 | def main(): 5 | fileUrl = r'https://www.stats.govt.nz/assets/Uploads/Business-financial-data/Business-financial-data-March-2021-quarter/Download-data/business-financial-data-march-2021-quarter-csv.zip' 6 | downloader = FileDownloader(fileUrl) 7 | path = downloader.download() 8 | assert ZipFile(path).testzip() is None 9 | # print(path) 10 | 11 | if __name__=='__main__': 12 | main() -------------------------------------------------------------------------------- /python/misc/fileDownloader/fileDownloader.py: -------------------------------------------------------------------------------- 1 | # from ntpath import join 2 | import requests 3 | import logging 4 | import os 5 | import shutil 6 | import time 7 | from customErrors import DirectoryNotFoundError 8 | 9 | # logging.basicConfig(level=logging.INFO) 10 | 11 | class FileDownloader: 12 | 13 | def __init__(self, url:str, saveDir:str=os.getcwd(), params:dict=None): 14 | """ 15 | Downloads a file from a given link to a given directory 16 | 17 | :param str url: the url containing the file to be downloaded 18 | :param str saveDir: the directory where the file will be downloaded to. 
Defaults to current directory 19 | :param (optional) dict params: NotImplemented 20 | :raises requests.exceptions.MissingSchema if the url is an invalid url 21 | :raises requests.exceptions.HttpError if the url doesn't return a 2xx status code 22 | :raises DirectoryNotFoundError if the directory passed doesn't exist 23 | """ 24 | self._validateUrl(url) 25 | self.url = url 26 | self.fileName = url.split('/')[-1] 27 | 28 | # if saveDir != os.path.abspath(os.path.dirname(__file__)): 29 | # saveDir = os.path.join(os.path.abspath(os.path.dirname(__file__)), saveDir) 30 | if saveDir != os.getcwd(): 31 | saveDir = os.path.join(os.getcwd(), saveDir) 32 | saveDir = os.path.normpath(saveDir) 33 | # saveDir = os.path.normcase(saveDir) 34 | self._validatesaveDir(saveDir) 35 | self.saveDir = saveDir 36 | 37 | self.fullPath = os.path.join(self.saveDir, self.fileName) 38 | self._logger = logging.getLogger('FileDownloader') 39 | self._logger.setLevel(logging.INFO) 40 | 41 | 42 | def _validateUrl(self, url:str): 43 | try: 44 | r = requests.get(url, stream=True) 45 | except requests.exceptions.MissingSchema: 46 | raise requests.exceptions.MissingSchema(f'Invalid url: {url}') 47 | else: 48 | r.raise_for_status() 49 | 50 | def _validatesaveDir(self, saveDir:str): 51 | if not os.path.exists(saveDir): 52 | raise DirectoryNotFoundError(f'Invalid save directory. 
Path "{saveDir}" does not exist') 53 | 54 | def download(self) -> str: 55 | """ 56 | Downloads file from link 57 | 58 | :returns str: the path of the downloaded file 59 | """ 60 | self._logger.info(f'Downloading file: "{self.url}"') 61 | start = time.time() 62 | 63 | r = requests.get(self.url, stream=True) 64 | with open(self.fullPath, 'wb') as f: 65 | shutil.copyfileobj(r.raw, f) 66 | 67 | end = time.time() 68 | self._logger.info(f'File downloaded to path: "{self.fullPath}" in {end-start: .2f} seconds') 69 | 70 | return self.fullPath 71 | 72 | 73 | 74 | -------------------------------------------------------------------------------- /python/misc/fileDownloader/test_fileDownloader.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from fileDownloader import FileDownloader 3 | import requests 4 | import os 5 | from customErrors import DirectoryNotFoundError 6 | import logging 7 | from zipfile import ZipFile 8 | 9 | class TestFileDownloader: 10 | 11 | def test_validUrl(self, tmpdir): 12 | with pytest.raises(requests.exceptions.MissingSchema): 13 | FileDownloader('dakfldfjasfk', str(tmpdir)) 14 | 15 | @pytest.mark.webtest 16 | def test_onlineUrl(self, tmpdir): 17 | with pytest.raises(requests.exceptions.HTTPError): 18 | #url is a valid test url that returns a 404 19 | FileDownloader('https://reqres.in/api/users/23', str(tmpdir)) 20 | 21 | def test_validSaveDir(self, tmpdir): 22 | validUrl = r'https://reqres.in/api/users?page=2' 23 | invalidDir = os.path.join(str(tmpdir), 'nonExistent') 24 | with pytest.raises(DirectoryNotFoundError): 25 | FileDownloader(validUrl, invalidDir) 26 | 27 | @pytest.mark.webtest 28 | def test_downloadZip(self, tmpdir): 29 | fileUrl = r'https://www.stats.govt.nz/assets/Uploads/Business-financial-data/Business-financial-data-March-2021-quarter/Download-data/business-financial-data-march-2021-quarter-csv.zip' 30 | fileName = r'business-financial-data-march-2021-quarter-csv.zip' 31 | 32 | 
downloader = FileDownloader(fileUrl, str(tmpdir)) 33 | try: 34 | path = downloader.download() 35 | 36 | except Exception as e: 37 | logging.exception(e) 38 | assert False 39 | else: 40 | assert path == os.path.join(str(tmpdir), fileName) 41 | #asserting that the zipfile is valid and uncorrupted 42 | assert ZipFile(path).testzip() is None 43 | -------------------------------------------------------------------------------- /r/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/r/.gitignore -------------------------------------------------------------------------------- /r/README.MD: -------------------------------------------------------------------------------- 1 | ## R 2 | 3 | --- 4 | 5 | The R – ArcGIS Community is a community driven collection of free, open source projects making it easier and faster for R users to work with ArcGIS data, and ArcGIS users to leverage the analysis capabilities of R. 6 | 7 | For more information, visit the [R - ArcGIS Bridge Overview Site](https://www.esri.com/en-us/arcgis/products/r-arcgis-bridge/overview) 8 | -------------------------------------------------------------------------------- /runtime-sdk/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/runtime-sdk/.gitignore -------------------------------------------------------------------------------- /runtime-sdk/README.MD: -------------------------------------------------------------------------------- 1 | ## ArcGIS Runtime SDKs 2 | 3 | ArcGIS Runtime SDKs help you build and deploy native applications to a variety of popular platforms and devices. Add powerful spatial capabilities to your native apps and empower your app users to do all things GIS, even when offline. 
4 | 5 | For more information, visit the [ArcGIS Runtime SDKs Homepage](https://developers.arcgis.com/arcgis-runtime/). 6 | -------------------------------------------------------------------------------- /survey123/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Esri/Geospatial-Center-Code/a8a1c7028d254690af788cbdd9cbdf859a422413/survey123/.gitignore -------------------------------------------------------------------------------- /survey123/README.MD: -------------------------------------------------------------------------------- 1 | ## Survey123 2 | 3 | --- 4 | 5 | ### Forms Examples 6 | 7 | `forms` 8 | 9 | Survey123 [XLS](https://doc.arcgis.com/en/survey123/desktop/create-surveys/xlsformessentials.htm) form examples that highlight a pattern or contain interesting calculations. 10 | 11 | For more information on getting started creating Forms with Survey123, view the documentation [here](https://doc.arcgis.com/en/survey123/desktop/create-surveys/createsurveys.htm). 12 | 13 | ### Custom JavaScript Functions 14 | 15 | `javascript` 16 | 17 | Starting with ArcGIS Survey123 version 3.10, you can incorporate logic in your forms using custom JavaScript (JS) functions. Custom JavaScript functions complement XLSForm expression syntax, giving you flexibility to build better calculations, data validation rules, constraints etc. 18 | 19 | To get started with custom JavaScript functions, view this [Blog Post](https://community.esri.com/groups/survey123/blog/2020/08/07/extending-survey123-smart-forms-with-custom-js-functions). 
/**
 * Survey123 custom-JS equivalent of the XLSForm `indexed-repeat()` function:
 * fetch field `name` from the i-th record of repeat `grp`, optionally
 * descending into a nested repeat (`subgrp`/`subi`) and a doubly nested
 * repeat (`subsubgrp`/`subsubi`). All indices are 1-based, matching XLSForm.
 *
 * @param {string} name - field name to read from the selected record
 * @param {Array<Object>} grp - top-level repeat records
 * @param {number} i - 1-based index into grp (1 <= i <= grp.length)
 * @param {string} [subgrp] - key of a nested repeat inside grp[i-1]
 * @param {number} [subi] - 1-based index into the nested repeat
 * @param {string} [subsubgrp] - key of a repeat nested inside the sub-record
 * @param {number} [subsubi] - 1-based index into the doubly nested repeat
 * @returns {*} the field value, or null when name/grp/i are invalid
 */
function indexed_repeat(name, grp, i, subgrp, subi, subsubgrp, subsubi) {
  // Guard clause: without a valid field name, array, and in-range top index
  // there is nothing to look up.
  if (name == undefined || !Array.isArray(grp) || !Number.isInteger(i) || i > grp.length || i < 1) {
    return null;
  }

  const record = grp[i - 1];

  // Nested lookup only when a sub-repeat key and a valid 1-based index are
  // supplied. Bound fixed to `<=` so the LAST sub-record is reachable
  // (previously `<`, an off-by-one that silently fell back to the parent).
  if (subgrp != undefined && Number.isInteger(subi) && subi >= 1 && subi <= record[subgrp].length) {
    const subRecord = record[subgrp][subi - 1];

    // Same `<=` bound fix for the doubly nested level.
    if (subsubgrp != undefined && Number.isInteger(subsubi) && subsubi >= 1 && subsubi <= subRecord[subsubgrp].length) {
      return subRecord[subsubgrp][subsubi - 1][name];
    }
    return subRecord[name];
  }

  // NOTE: as in the original, a provided-but-out-of-range subi falls back to
  // the top-level record's field rather than returning null.
  return record[name];
}