├── partnerutils ├── __init__.py ├── README.md ├── processing_utils.py ├── feature_utils.py ├── user_utils.py ├── clone_utils.py ├── cool_utils.py └── etl_utils.py ├── sample_data └── sample_census_tract_geoid.csv ├── REST ├── README.md └── AddFeaturesOnTimer.py ├── common_workflows ├── README.md ├── standard_geography.ipynb ├── csv_geocode.ipynb ├── distribute_items.ipynb └── update_webmaps.ipynb ├── feature_layers ├── README.md ├── manage_indexes.ipynb ├── enable_time.ipynb ├── shapefile_upload.ipynb ├── csv_upload.ipynb ├── manage_fields.ipynb ├── geojson_upload.ipynb ├── create_views.ipynb └── update_data.ipynb ├── .gitignore ├── README.md └── setup.py /partnerutils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /sample_data/sample_census_tract_geoid.csv: -------------------------------------------------------------------------------- 1 | census_tract_geoid 2 | 36047032100 3 | 36081065702 -------------------------------------------------------------------------------- /REST/README.md: -------------------------------------------------------------------------------- 1 | # ArcGISAddFeatures 2 | Headless ArcGIS python script to push attribute data into an ArcGIS Online hosted data table. The script runs on a timer at a user defined interval. 
3 | 4 | Prerequisites: 5 | ArcGIS Online account 6 | Hosted data table with fields 'pk', 'amount', 'datetime1' 7 | 8 | Usage: 9 | * AddFeaturesOnTimer.py 'URL of the hosted table' 'interval (sec)' 'ID' 'Value' 'My User Name' 'My Password' 10 | -------------------------------------------------------------------------------- /partnerutils/README.md: -------------------------------------------------------------------------------- 1 | # Partner Utils 2 | 3 | > Functions that I've found helpful 4 | 5 | * [`cool_utils.py`](/partnerutils/cool_utils.py) - functions I want to remember and hopefully you will too! 6 | * [`etl_utils.py`](/partnerutils/etl_utils.py) - assist with common ETL logic 7 | * [`user_utils.py`](/partnerutils/user_utils.py) - assist with adding users 8 | * [`clone_utils.py`](/partnerutils/clone_utils.py) - assist with cloning groups & items 9 | * [`feature_utils.py`](/partnerutils/feature_utils.py) - assist with features and feature data types -------------------------------------------------------------------------------- /common_workflows/README.md: -------------------------------------------------------------------------------- 1 | # Common 2 | 3 | > Common workflows with the Python API 4 | 5 | * [`csv_geocode.ipynb`](/common_workflows/csv_geocode.ipynb) - [geocode](https://developers.arcgis.com/features/geocoding/) rows in `csvs` and `dataframes` 6 | * [`vector_data_products`](/common_workflows/vector_data_products.ipynb) - end-to-end workflows for managing vector content and derivative information products 7 | * [`distribute_items.ipynb`](/common_workflows/distribute_items.ipynb) - common patterns for distributing items to another organization 8 | * [`standard_geography.ipynb`](/common_workflows/standard_geography.ipynb) - enrich [standard geography](https://developers.arcgis.com/rest/geoenrichment/api-reference/standard-geography-query.htm) ids, such as `census blocks`, with geometries 
-------------------------------------------------------------------------------- /feature_layers/README.md: -------------------------------------------------------------------------------- 1 | # Feature Layers 2 | 3 | > Common operations with [hosted feature layers](https://doc.arcgis.com/en/arcgis-online/share-maps/hosted-web-layers.htm) 4 | 5 | * [`csv_upload.ipynb`](/feature_layers/csv_upload.ipynb) - upload a folder of `csvs` & `dataframes` 6 | * [`shapefile_upload.ipynb`](/feature_layers/shapefile_upload.ipynb) - upload a folder of `Shapefiles` 7 | * [`geojson_upload.ipynb`](/feature_layers/geojson_upload.ipynb) - upload a geojson file 8 | * [`update_data.ipynb`](/feature_layers/update_data.ipynb) - a couple different workflows for updating uploaded / hosted data 9 | * [`create_views.ipynb`](/feature_layers/create_views.ipynb) - create database views with separate permissions against one authoritative layer 10 | * [`manage_fields.ipynb`](/feature_layers/manage_fields.ipynb) - view and edit fields 11 | * [`manage_indexes.ipynb`](/feature_layers/manage_indexes.ipynb) - view, edit, and refresh indexes 12 | * [`enable_time.ipynb`](/feature_layers/enable_time.ipynb) - add time metadata that will be reflected in ArcGIS app UIs -------------------------------------------------------------------------------- /partnerutils/processing_utils.py: -------------------------------------------------------------------------------- 1 | """******************************************** 2 | * A couple utility functions to help with processing data 3 | ********************************************""" 4 | from arcgis.geocoding import batch_geocode 5 | from partnerutils.cool_utils import memoize, chunk 6 | 7 | @memoize 8 | def batch_geocode_memo(addresses, **kwargs): 9 | """Batch geocodes a list of addresses and memoizes the results 10 | to avoid repeated calls and credit consumption. 
11 | 12 | args: 13 | addresses - the addresses to geocode 14 | 15 | **kwargs: 16 | cache_path - persist the results to a file 17 | get_key - function to uniquely identify each address 18 | all others from here: 19 | https://developers.arcgis.com/python/api-reference/arcgis.geocoding.html?highlight=batch_geocode#arcgis.geocoding.batch_geocode""" 20 | 21 | output = [] 22 | for c in chunk(addresses): # split into chunks for large volumes 23 | results = batch_geocode(addresses=c, **kwargs) 24 | output += results 25 | 26 | return output -------------------------------------------------------------------------------- /REST/AddFeaturesOnTimer.py: -------------------------------------------------------------------------------- 1 | import threading 2 | import sys 3 | import requests 4 | import json 5 | from datetime import datetime 6 | 7 | 8 | # Disable warnings 9 | requests.packages.urllib3.disable_warnings() 10 | 11 | # Get Parameters 12 | addfsURL = str(sys.argv[1]) 13 | interval = int(sys.argv[2]) 14 | deviceID = int(sys.argv[3]) 15 | quantity = int(sys.argv[4]) 16 | username = str(sys.argv[5]) 17 | password = str(sys.argv[6]) 18 | 19 | # Generate Token 20 | tokenURL = 'https://www.arcgis.com/sharing/rest/generateToken' 21 | params = {'f': 'pjson', 'username': username, 'password': password, 'referer': 'http://www.arcgis.com'} 22 | r = requests.post(tokenURL, data = params, verify=False) 23 | response = json.loads(r.content) 24 | token = response['token'] 25 | 26 | # define timer 27 | def fireTimer(): 28 | threading.Timer(interval, fireTimer).start() 29 | 30 | #get timestamp 31 | noww = datetime.now() 32 | dateTimeNow = str(noww) 33 | 34 | attr = [{"attributes":{"pk":deviceID, "amount":quantity, "datetime1":dateTimeNow}}] 35 | params = {"features": json.dumps(attr), 'token': token, 'f': 'json'} 36 | r = requests.post(addfsURL, data = params, verify=False) 37 | 38 | print(r.json) 39 | 40 | fireTimer() 41 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # local 2 | client_server 3 | *DS_STORE 4 | *test.csv 5 | test/ 6 | test_data/ 7 | stage/ 8 | .vscode/ 9 | !.vscode/settings.json 10 | !.vscode/tasks.json 11 | !.vscode/launch.json 12 | !.vscode/extensions.json 13 | 14 | # Byte-compiled / optimized / DLL files 15 | __pycache__/ 16 | *.py[cod] 17 | *$py.class 18 | 19 | # C extensions 20 | *.so 21 | 22 | # Distribution / packaging 23 | .Python 24 | env/ 25 | build/ 26 | develop-eggs/ 27 | dist/ 28 | downloads/ 29 | eggs/ 30 | .eggs/ 31 | lib/ 32 | lib64/ 33 | parts/ 34 | sdist/ 35 | var/ 36 | wheels/ 37 | *.egg-info/ 38 | .installed.cfg 39 | *.egg 40 | 41 | # PyInstaller 42 | # Usually these files are written by a python script from a template 43 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 44 | *.manifest 45 | *.spec 46 | 47 | # Installer logs 48 | pip-log.txt 49 | pip-delete-this-directory.txt 50 | 51 | # Unit test / coverage reports 52 | htmlcov/ 53 | .tox/ 54 | .coverage 55 | .coverage.* 56 | .cache 57 | nosetests.xml 58 | coverage.xml 59 | *.cover 60 | .hypothesis/ 61 | 62 | # Translations 63 | *.mo 64 | *.pot 65 | 66 | # Django stuff: 67 | *.log 68 | local_settings.py 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # pyenv 87 | .python-version 88 | 89 | # celery beat schedule file 90 | celerybeat-schedule 91 | 92 | # SageMath parsed files 93 | *.sage.py 94 | 95 | # dotenv 96 | .env 97 | 98 | # virtualenv 99 | .venv 100 | venv/ 101 | ENV/ 102 | 103 | # Spyder project settings 104 | .spyderproject 105 | .spyproject 106 | 107 | # Rope project settings 108 | .ropeproject 109 | 110 | # mkdocs documentation 
111 | /site 112 | 113 | # mypy 114 | .mypy_cache/ 115 | -------------------------------------------------------------------------------- /partnerutils/feature_utils.py: -------------------------------------------------------------------------------- 1 | """utility functions to assist with features and feature data types""" 2 | 3 | from arcgis.geometry import SpatialReference, Point 4 | import pandas as pd 5 | 6 | def sdf_from_xyz(df, x_col, y_col, z_col=None, sr=None): 7 | """builds a SpatialDataFrame from DataFrame with 8 | x, y, and z columns 9 | 10 | args: 11 | df - the dataframe 12 | x_col - the dataframe column corresponding to x coordinate 13 | y_col - the dataframe column corresponding to y coordinate 14 | z_col - optional, the dataframe column corresponding to z coordinate 15 | sr - the spatial reference for the spatial data frame 16 | """ 17 | 18 | if not z_col: 19 | return pd.DataFrame.spatial.from_xy(df, x_col, y_col, sr) 20 | 21 | def point_for_row(x, y, z, sr): 22 | return Point({'x' : x, 'y' : y, 'z': z, "spatialReference" : sr}) 23 | 24 | if sr is None: 25 | sr = SpatialReference({'wkid' : 4326}) 26 | 27 | df_geom = df.apply(lambda row: point_for_row(row[x_col], 28 | row[y_col], 29 | row[z_col], 30 | sr), axis=1) 31 | sdf = df.spatial.set_geometry(df_geom, sr=sr) 32 | return sdf 33 | 34 | 35 | def row_to_geojson(row, lon_field, lat_field): 36 | """returns a geojson feature for a flat dictionary 37 | 38 | args: 39 | row -- dictionary with values 40 | lon_field -- longitude dictionary key 41 | lat_field -- latitude dictionary key""" 42 | return { 43 | 'type': 'Feature', 44 | 'geometry': { 45 | 'type': 'Point', 46 | 'coordinates': [row[lon_field], row[lat_field]] 47 | }, 48 | 'properties': {**row} 49 | } 50 | 51 | def rows_to_geojson(rows, lon_field, lat_field): 52 | """returns a geojson feature collection for a list of flat dictionary rows 53 | 54 | args: 55 | rows -- list of dictionaries with values 56 | lon_field -- longitude dictionary key 57 | 
lat_field -- latitude dictionary key""" 58 | features = [row_to_geojson(r, lon_field, lat_field) for r in rows] 59 | return { 60 | 'type': 'FeatureCollection', 61 | 'features': features 62 | } -------------------------------------------------------------------------------- /partnerutils/user_utils.py: -------------------------------------------------------------------------------- 1 | """utility functions to assist with adding users""" 2 | import csv 3 | 4 | USER_FIELDS = ["username", "password", "firstname", "lastname", "email", "role", "groups", 5 | "groups", "description", "role", "provider", "idp_username", "level"] 6 | 7 | def add_user(user, gis, groups=None, field_map=None): 8 | """Add user to the gis 9 | * Abstraction for creating from dict such as with csv 10 | * Handles moving to groups 11 | 12 | args: 13 | user -- a dictionary containing user fields, see fields: 14 | http://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html#arcgis.gis.UserManager.create 15 | gis -- gis object where users are added 16 | groups -- (optional) destination groups, compliments those in dict (default []) 17 | field_map -- (optional) change keys from defaults in USER_FIELDS to those in dict 18 | """ 19 | 20 | # set defaults 21 | groups = groups if groups else [] 22 | field_map = field_map if field_map else {} 23 | 24 | try: 25 | 26 | # Define new user fields 27 | new_user = {} 28 | for field in USER_FIELDS: 29 | new_user[field] = (user.get(field_map[field], None) 30 | if field in field_map 31 | else user.get(field, None)) 32 | 33 | print("INFO: Creating user {}".format(new_user["username"])) 34 | 35 | # Create/augment array of destination groups for user 36 | # Pop group from user because separate logic 37 | group_field = field_map["groups"] if "groups" in field_map else "groups" 38 | group_str = new_user.pop(group_field, None) 39 | if group_str: 40 | group_list = group_str.split(",") 41 | for g in group_list: 42 | group_search = gis.groups.search(g) 43 | if 
group_search: 44 | groups.append(group_search[0]) 45 | 46 | # Create new user 47 | result = gis.users.create(**new_user) 48 | 49 | # Sometimes there's an error that doesn't throw 50 | if not result: 51 | return 52 | 53 | # Add user to groups 54 | for g in groups: 55 | try: 56 | g.add_users([new_user['username']]) 57 | except Exception as e: 58 | print("ERR: Could not add user to group {}".format(g)) 59 | print(e) 60 | 61 | return result 62 | 63 | except Exception as e: 64 | print("ERR: Could not create user {}".format(user['username'])) 65 | print(e) 66 | 67 | def add_users_csv(csv_file, gis, groups=None, field_map=None): 68 | """Add users from csv to gis 69 | * Convenient abstraction for csvs 70 | 71 | args: 72 | csv_file -- path to csv with users to create 73 | gis -- gis object where users are added 74 | groups -- (optional) destination groups, complements those in csv (default []) 75 | field_map -- (optional) change keys from defaults in USER_FIELDS to those in csv, see fields: 76 | http://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html#arcgis.gis.UserManager.create 77 | """ 78 | results = [] 79 | with open(csv_file, 'r') as users_csv: 80 | users = csv.DictReader(users_csv) 81 | for user in users: 82 | result = add_user(user, gis, groups=groups, field_map=field_map) 83 | results.append(result) 84 | 85 | return results -------------------------------------------------------------------------------- /partnerutils/clone_utils.py: -------------------------------------------------------------------------------- 1 | """utility functions to assist with cloning groups & items""" 2 | 3 | def search_item_title(target, title): 4 | """search org for an existing item with title 5 | 6 | args: 7 | target -- target GIS to search 8 | title -- the item title to search for 9 | """ 10 | s_items = target.content.search(query='title:{}'.format(title)) 11 | for s_item in s_items: 12 | if s_item.title == title: 13 | return s_item 14 | return None 15 | 16 | def 
search_group_title(target, title, **kwargs): 17 | """search org for an existing group with title 18 | 19 | args: 20 | target -- target GIS to search 21 | group -- group with title to search 22 | **kwargs -- all additional arguments described here 23 | https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html#arcgis.gis.GroupManager.search 24 | """ 25 | s_items = target.groups.search(title, **kwargs) 26 | for s_item in s_items: 27 | if s_item.title == title: 28 | return s_item 29 | return None 30 | 31 | def clone_items_modify(items, target, 32 | modify_item_callback=None, modify_group_callback=None, **kwargs): 33 | """Clone groups and items to a target GIS 34 | * Abstraction over arcgis.gis.ContentManager.clone_items to: 35 | - Provide callbacks to update properties, sometimes shouldn't have 1:1 copy 36 | 37 | args: 38 | items -- items/groups to be cloned. specifying groups will clone groups & their items 39 | target -- target gis where items or groups will be cloned 40 | modify_item_callback -- callback to update Item properties, expects: 41 | args: (item_clone, target_gis) 42 | returns: flattened dict of args, eg {'title':'<>'; 'data':'<>'}, args here: 43 | https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html?highlight=clone_items#arcgis.gis.Item.update 44 | 45 | modify_group_callback -- callback to update Group properties, expects: 46 | args: (group_clone, expected_title, target_gis) 47 | returns: dict of args here: 48 | https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html?highlight=clone_items#arcgis.gis.Group.update 49 | 50 | **kwargs: 51 | all additional args described here: 52 | https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html?highlight=clone_items#arcgis.gis.ContentManager.clone_items 53 | """ 54 | 55 | from arcgis.gis import Group 56 | from arcgis.gis import Item 57 | 58 | # clone the items 59 | results = target.content.clone_items(items, **kwargs) 60 | 61 | # update the cloned 
properties 62 | 63 | # list of all the groups 64 | source_groups = [item for item in items if isinstance(item, Group)] 65 | 66 | # update each result 67 | # clone_items automatically changes group name if name already exists in target 68 | # this requires additional logic to pass back the expected name 69 | for result in results: 70 | if isinstance(result, Item) and modify_item_callback: 71 | props = modify_item_callback(result, target) 72 | args = {} 73 | args['data'] = props.pop('data', None) 74 | args['thumbnail'] = props.pop('thumbnail', None) 75 | args['metadata'] = props.pop('metadata', None) 76 | args['item_properties'] = props 77 | result.update(**args) 78 | elif isinstance(result, Group) and modify_group_callback: 79 | source_tag = next(tag for tag in result.tags if 'source-' in tag) 80 | source_id = source_tag[7:] 81 | source_group = next(group for group in source_groups if group.id == source_id) 82 | props = modify_group_callback(result, source_group.title, target) 83 | result.update(**props) 84 | 85 | return results 86 | -------------------------------------------------------------------------------- /partnerutils/cool_utils.py: -------------------------------------------------------------------------------- 1 | """Functions that I want to remember and hopefully you will too!""" 2 | 3 | import glob 4 | import os 5 | import json 6 | import pandas as pd 7 | 8 | def chunk(row, n=1000): 9 | """chunk generator function for breaking up requests 10 | such as for Esri's geocoder 11 | 12 | args: 13 | row - the object to be chunked 14 | n - chunk size 15 | """ 16 | for i in range(0, len(row), n): 17 | yield row[i:i + n] 18 | 19 | def chunk_df(df, n=1000): 20 | """chunk generator function for breaking up requests with dataframes 21 | 22 | args: 23 | df - the dataframe to be chunked 24 | n - chunk size 25 | """ 26 | 27 | for i in range(0, len(df), n): 28 | yield df.iloc[i:i + n].copy() 29 | 30 | def csvs_to_df(dir_path): 31 | """concats all csvs in a directory to 
one dataframe 32 | 33 | args: 34 | dir_path - path to dir containing csvs 35 | """ 36 | 37 | all_csvs = glob.glob(os.path.join(dir_path, '*.csv')) 38 | return pd.concat((pd.read_csv(f) for f in all_csvs if os.stat(f).st_size > 0)) 39 | 40 | def extract(obj, keys, **kwargs): 41 | """returns a nested object value for the specified keys 42 | 43 | args: 44 | obj -- the nested object containing the desired value 45 | keys -- list of keys to drill through object 46 | """ 47 | required = kwargs.pop('required', False) 48 | default = kwargs.pop('default', None) 49 | warn = kwargs.pop('warn', False) 50 | 51 | o = obj 52 | for i in range(0, len(keys)): 53 | try: 54 | o = o[keys[i]] 55 | except (KeyError, IndexError): 56 | if warn: 57 | print('Warning key does not exist. Key: {0} in Keys: {1}'.format(keys[i], keys)) 58 | if required and default == None: 59 | raise KeyError('Required key does not exist in object and no default') 60 | return default 61 | return o 62 | 63 | def d_extract(obj, keys_delimited, **kwargs): 64 | """returns a nested object value for '.' delimited keys 65 | 66 | args: 67 | obj -- the nested object containing the desired value 68 | keys -- a '.' 
delimited string of keys to drill through""" 69 | keys = keys_delimited.split('.') 70 | return extract(obj, keys, **kwargs) 71 | 72 | def read_json(path): 73 | """Read in a JSON file as a dictionary 74 | 75 | args: 76 | path - path to the JSON file""" 77 | if os.path.exists(path): 78 | with open(path, 'r') as f: 79 | return json.load(f) 80 | return {} 81 | 82 | def write_json(path, obj): 83 | """Write a dictionary to a file 84 | 85 | args: 86 | path -- path to the output JSON file 87 | obj -- dictionary to write""" 88 | with open(path, 'w') as f: 89 | json.dump(obj, f) 90 | 91 | def memoize(f): 92 | """Decorator to memoize function calls that receive a list 93 | as their first argument, useful to avoid expensive operations 94 | 95 | args: 96 | f -- the function to memoize""" 97 | 98 | cache = {} 99 | 100 | def execute(input_list, *args, **kwargs): 101 | cache_path = kwargs.pop('cache_path', None) 102 | get_key = kwargs.pop('get_key', lambda i: str(i)) 103 | 104 | if cache_path: 105 | cache.update(read_json(cache_path)) 106 | 107 | keys = set() 108 | process = [] 109 | for i in input_list: 110 | key = get_key(i) 111 | if key not in cache and key not in keys: 112 | process.append(i) 113 | keys.add(key) 114 | 115 | if len(process) > 0: 116 | results = f(process, *args, **kwargs) 117 | for i, p in enumerate(process): 118 | cache[get_key(p)] = results[i] 119 | 120 | if cache_path: 121 | write_json(cache_path, cache) 122 | 123 | return [cache[get_key(i)] for i in input_list] 124 | 125 | return execute -------------------------------------------------------------------------------- /common_workflows/standard_geography.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Bulk Standard Geography\n", 8 | "A notebook to enrich **[standard geography](https://developers.arcgis.com/rest/geoenrichment/api-reference/standard-geography-query.htm) 
ids**, such as `census block groups`, with their geometries and publish as a geographic layer. This uses the [`standard_geography_query`](https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.geoenrichment.html#standard-geography-query) function and there's a blog about these queries [here](https://blogs.esri.com/esri/arcgis/2017/07/10/best-practices-how-to-query-standard-geographies-branches/)." 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": null, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "# common imports\n", 18 | "import pandas as pd\n", 19 | "from arcgis.gis import GIS\n", 20 | "from arcgis.geoenrichment import standard_geography_query" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "***Note**, if you are unable to import local `partnerutils`, **copy the following functions** from [`cool_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/cool_utils.py)" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "from partnerutils.cool_utils import chunk" 37 | ] 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "metadata": {}, 42 | "source": [ 43 | "## User Input\n", 44 | "* **GIS**: Your GIS instance, parameter information [here](https://developers.arcgis.com/python/guide/using-the-gis/). Not specifying a password creates a password prompt" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "gis = GIS(username=\"mpayson_startups\")" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "* **csv_path**: Path to the `csv` with geography IDs\n", 61 | "* **lyr_title**: Title for output hosted layer\n", 62 | "* **layers**: The standard geography layers whose geometry you want to fetch. 
Here's the [US list](http://geoenrich.arcgis.com/arcgis/rest/services/World/GeoenrichmentServer/Geoenrichment/StandardGeographyLevels/US/census?f=pjson) of layers.\n", 63 | "* **f_std_id**: The `csv` field with the ids corresponding to a standard geography layer" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "csv_path = '../sample_data/sample_census_tract_geoid.csv'\n", 73 | "lyr_title = 'test out'\n", 74 | "layers = ['US.Tracts']\n", 75 | "f_std_id = 'census_tract_geoid'" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "## Execution" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "# read df\n", 92 | "df = pd.read_csv(csv_path, dtype={f_std_id: 'object'})" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "# get and concat geometries, chunking because of request time-outs\n", 102 | "ids = [i for i in df[f_std_id]]\n", 103 | "gdf = pd.concat((standard_geography_query(layers=layers, ids=c, return_geometry=True) for c in chunk(ids)), ignore_index=True)" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": null, 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "# merge geometries\n", 113 | "sdf = pd.merge(gdf, df, left_on='AreaID', right_on=f_std_id)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "# Send it!\n", 123 | "sdf.spatial.to_featurelayer(lyr_title, tags=\"esri_partner_tools_sample\")" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | "## Clean up" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | 
"metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 140 | "gis.content.delete_items(delete_items)" 141 | ] 142 | } 143 | ], 144 | "metadata": { 145 | "kernelspec": { 146 | "display_name": "Python 3", 147 | "language": "python", 148 | "name": "python3" 149 | }, 150 | "language_info": { 151 | "codemirror_mode": { 152 | "name": "ipython", 153 | "version": 3 154 | }, 155 | "file_extension": ".py", 156 | "mimetype": "text/x-python", 157 | "name": "python", 158 | "nbconvert_exporter": "python", 159 | "pygments_lexer": "ipython3", 160 | "version": "3.7.4" 161 | } 162 | }, 163 | "nbformat": 4, 164 | "nbformat_minor": 4 165 | } 166 | -------------------------------------------------------------------------------- /feature_layers/manage_indexes.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Manage Indexes\n", 8 | "[Feature Layer](http://esri.github.io/arcgis-python-api/apidoc/html/arcgis.features.toc.html#featurelayer) || [the Manager](http://esri.github.io/arcgis-python-api/apidoc/html/arcgis.features.managers.html?highlight=manager#featurelayermanager) || [About indexes](https://en.wikipedia.org/wiki/Database_index)" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": null, 14 | "metadata": { 15 | "collapsed": true, 16 | "jupyter": { 17 | "outputs_hidden": true 18 | } 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "from arcgis.gis import GIS\n", 23 | "from arcgis.features import FeatureLayer" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": { 30 | "collapsed": true, 31 | "jupyter": { 32 | "outputs_hidden": true 33 | } 34 | }, 35 | "outputs": [], 36 | "source": [ 37 | "gis = GIS(username=\"mpayson_startups\")" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 
43 | "metadata": { 44 | "collapsed": true, 45 | "jupyter": { 46 | "outputs_hidden": true 47 | } 48 | }, 49 | "outputs": [], 50 | "source": [ 51 | "lyr = FeatureLayer(\"\", gis=gis)" 52 | ] 53 | }, 54 | { 55 | "cell_type": "markdown", 56 | "metadata": {}, 57 | "source": [ 58 | "### Existing Indexes" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": null, 64 | "metadata": { 65 | "collapsed": true, 66 | "jupyter": { 67 | "outputs_hidden": true 68 | } 69 | }, 70 | "outputs": [], 71 | "source": [ 72 | "lyr.properties.indexes" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "### Rebuild Indexes\n", 80 | "[Update endpoint](https://developers.arcgis.com/rest/services-reference/update-definition-feature-layer-.htm)" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": { 87 | "collapsed": true, 88 | "jupyter": { 89 | "outputs_hidden": true 90 | } 91 | }, 92 | "outputs": [], 93 | "source": [ 94 | "# build serializable dictionary instead of PropertyMap\n", 95 | "index_list = [dict(i) for i in lyr.properties.indexes]\n", 96 | "update_dict = {\"indexes\": index_list}" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "metadata": { 103 | "collapsed": true, 104 | "jupyter": { 105 | "outputs_hidden": true 106 | } 107 | }, 108 | "outputs": [], 109 | "source": [ 110 | "# \"updating\" existing indexes will rebuild them\n", 111 | "lyr.manager.update_definition(update_dict)" 112 | ] 113 | }, 114 | { 115 | "cell_type": "markdown", 116 | "metadata": {}, 117 | "source": [ 118 | "### Add Index\n", 119 | "[Add to definition endpoint](http://resources.arcgis.com/en/help/arcgis-rest-api/#/Add_to_Definition_Feature_Layer/02r300000228000000/).\n", 120 | "\n", 121 | "You can use this endpoint to add new indexes. These indexes can be for one or multiple fields. 
More on [multiple indexes vs multi-column indexes](https://stackoverflow.com/questions/179085/multiple-indexes-vs-multi-column-indexes/179109#179109)." 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": { 128 | "collapsed": true, 129 | "jupyter": { 130 | "outputs_hidden": true 131 | } 132 | }, 133 | "outputs": [], 134 | "source": [ 135 | "# see available fields\n", 136 | "lyr.properties.fields" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "metadata": { 143 | "collapsed": true, 144 | "jupyter": { 145 | "outputs_hidden": true 146 | } 147 | }, 148 | "outputs": [], 149 | "source": [ 150 | "new_index = {\n", 151 | " \"name\" : \"\", \n", 152 | " \"fields\" : \"\"\n", 153 | "# \"isUnique\" : False,\n", 154 | "# \"isAscending\" : False,\n", 155 | "# \"description\" : \"MY INDEX\" \n", 156 | "}\n", 157 | "add_dict = {\"indexes\" : [new_index]}" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "metadata": { 164 | "collapsed": true, 165 | "jupyter": { 166 | "outputs_hidden": true 167 | } 168 | }, 169 | "outputs": [], 170 | "source": [ 171 | "lyr.manager.add_to_definition(add_dict)" 172 | ] 173 | } 174 | ], 175 | "metadata": { 176 | "kernelspec": { 177 | "display_name": "Python 3", 178 | "language": "python", 179 | "name": "python3" 180 | }, 181 | "language_info": { 182 | "codemirror_mode": { 183 | "name": "ipython", 184 | "version": 3 185 | }, 186 | "file_extension": ".py", 187 | "mimetype": "text/x-python", 188 | "name": "python", 189 | "nbconvert_exporter": "python", 190 | "pygments_lexer": "ipython3", 191 | "version": "3.8.5" 192 | } 193 | }, 194 | "nbformat": 4, 195 | "nbformat_minor": 4 196 | } 197 | -------------------------------------------------------------------------------- /feature_layers/enable_time.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": 
"markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Enable time\n", 8 | "ArcGIS allows you to [enable time on feature layers](https://doc.arcgis.com/en/arcgis-online/create-maps/configure-time.htm). This does a couple of things -- to the user, it will make time sliders and other UI components automatically show up in ArcGIS clients to respect the time settings of the layer. To the service, it will add time metadata (start / end) as well as an index to speed up time-based queries.\n", 9 | "\n", 10 | "This is a simple Python script to show doing this automatically. To update the metadata (eg when the underlying data changes), rerun the same process on the existing layer." 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "from arcgis.gis import GIS\n", 20 | "from arcgis.features import FeatureLayer" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "## Variables\n", 28 | "* **gis**: your GIS instance, parameter information [here](https://developers.arcgis.com/python/guide/using-the-gis/)\n", 29 | "* **file_path**: the path to the GeoJSON file used to create a demo layer\n", 30 | "* **time_field**: the field representing time for a given feature / record" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 2, 36 | "metadata": {}, 37 | "outputs": [ 38 | { 39 | "name": "stdin", 40 | "output_type": "stream", 41 | "text": [ 42 | "Enter password: ········\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "gis = GIS(username=\"mpayson_geodev\")" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 3, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "file_path = \"../sample_data/NYC_Restaurant_Inspections.geojson\"\n", 57 | "time_field = \"RECORD_DAT\"" 58 | ] 59 | }, 60 | { 61 | "cell_type": "markdown", 62 | "metadata": {}, 63 | "source": [ 64 | "## Initialization" 65 | ] 66 | }, 67 | { 68 | 
"cell_type": "code", 69 | "execution_count": 4, 70 | "metadata": {}, 71 | "outputs": [ 72 | { 73 | "data": { 74 | "text/plain": [ 75 | "" 76 | ] 77 | }, 78 | "execution_count": 4, 79 | "metadata": {}, 80 | "output_type": "execute_result" 81 | } 82 | ], 83 | "source": [ 84 | "# publish a new layer item for demo purposes\n", 85 | "item = gis.content.add({\n", 86 | " \"type\": \"GeoJson\",\n", 87 | " \"title\": \"My GeoJSON\",\n", 88 | " \"tags\": \"esri_partner_tools_sample\"\n", 89 | "}, data='../sample_data/NYC_Restaurant_Inspections.geojson')\n", 90 | "lyr_item = item.publish()\n", 91 | "\n", 92 | "# get a reference to the feature layer\n", 93 | "lyr = FeatureLayer.fromitem(lyr_item)\n", 94 | "lyr" 95 | ] 96 | }, 97 | { 98 | "cell_type": "markdown", 99 | "metadata": {}, 100 | "source": [ 101 | "### Enable time! This is the important part" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 5, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "data": { 111 | "text/plain": [ 112 | "{'success': True}" 113 | ] 114 | }, 115 | "execution_count": 5, 116 | "metadata": {}, 117 | "output_type": "execute_result" 118 | } 119 | ], 120 | "source": [ 121 | "update_dict = {\n", 122 | " \"timeInfo\": {\n", 123 | " \"startTimeField\": time_field,\n", 124 | " # use to represent a time range, not single event\n", 125 | " # \"endTimeField\": end_field\n", 126 | " }\n", 127 | "}\n", 128 | "lyr.manager.add_to_definition(update_dict)" 129 | ] 130 | }, 131 | { 132 | "cell_type": "markdown", 133 | "metadata": {}, 134 | "source": [ 135 | "## Clean Up" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 6, 141 | "metadata": {}, 142 | "outputs": [ 143 | { 144 | "data": { 145 | "text/plain": [ 146 | "True" 147 | ] 148 | }, 149 | "execution_count": 6, 150 | "metadata": {}, 151 | "output_type": "execute_result" 152 | } 153 | ], 154 | "source": [ 155 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 156 | 
"gis.content.delete_items(delete_items)" 157 | ] 158 | }, 159 | { 160 | "cell_type": "code", 161 | "execution_count": null, 162 | "metadata": {}, 163 | "outputs": [], 164 | "source": [] 165 | } 166 | ], 167 | "metadata": { 168 | "kernelspec": { 169 | "display_name": "Python 3", 170 | "language": "python", 171 | "name": "python3" 172 | }, 173 | "language_info": { 174 | "codemirror_mode": { 175 | "name": "ipython", 176 | "version": 3 177 | }, 178 | "file_extension": ".py", 179 | "mimetype": "text/x-python", 180 | "name": "python", 181 | "nbconvert_exporter": "python", 182 | "pygments_lexer": "ipython3", 183 | "version": "3.8.5" 184 | } 185 | }, 186 | "nbformat": 4, 187 | "nbformat_minor": 4 188 | } 189 | -------------------------------------------------------------------------------- /feature_layers/shapefile_upload.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Bulk Shapefile Upload\n", 8 | "A notebook to read shapefiles and upload their data as [hosted feature layers](https://doc.arcgis.com/en/arcgis-online/share-maps/hosted-web-layers.htm) in ArcGIS" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": null, 14 | "metadata": { 15 | "collapsed": true, 16 | "jupyter": { 17 | "outputs_hidden": true 18 | } 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "# common imports\n", 23 | "import os\n", 24 | "from arcgis.gis import *\n", 25 | "import shutil" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "## User Input\n", 33 | "\n", 34 | "* **gis**: your GIS instance, parameter information [here](https://developers.arcgis.com/python/guide/using-the-gis/)\n", 35 | "* **dir_path**: path to directory with the Shapefiles" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "metadata": { 42 | "collapsed": true, 43 | "jupyter": { 44 | 
"outputs_hidden": true 45 | } 46 | }, 47 | "outputs": [], 48 | "source": [ 49 | "gis = GIS(\"https://www.arcgis.com\", \"\", \"\")\n", 50 | "\n", 51 | "dir_path = \"\"" 52 | ] 53 | }, 54 | { 55 | "cell_type": "markdown", 56 | "metadata": {}, 57 | "source": [ 58 | "## Execution" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": null, 64 | "metadata": { 65 | "collapsed": true, 66 | "jupyter": { 67 | "outputs_hidden": true 68 | } 69 | }, 70 | "outputs": [], 71 | "source": [ 72 | "# get an array of all the shapefiles in the directory\n", 73 | "shapefiles = [file for file in os.listdir(dir_path) if file.endswith('.shp')]\n", 74 | "shp_paths = [os.path.join(dir_path, file) for file in shapefiles]\n", 75 | "print(\"Pushing {0} shapefile(s)\".format(len(shp_paths)))" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "metadata": { 82 | "collapsed": true, 83 | "jupyter": { 84 | "outputs_hidden": true 85 | } 86 | }, 87 | "outputs": [], 88 | "source": [ 89 | "# create a new folder in the GIS to store the layers (if it doesn't already exist)\n", 90 | "folder = os.path.basename(dir_path)\n", 91 | "gis_folder = gis.content.create_folder(folder)\n", 92 | "\n", 93 | "# handle folder already exists\n", 94 | "gis_folder = folder if gis_folder is None else gis_folder" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "metadata": { 101 | "collapsed": true, 102 | "jupyter": { 103 | "outputs_hidden": true 104 | } 105 | }, 106 | "outputs": [], 107 | "source": [ 108 | "# loop through shapefiles, zip them for upload, upload layers to GIS, move to new folder\n", 109 | "for i, shp_path in enumerate(shp_paths):\n", 110 | " # create a temp dir to work in\n", 111 | " with tempfile.TemporaryDirectory() as temp_dir:\n", 112 | " # copy all shapefile files into temp dir and zip the dir.\n", 113 | " temp_shp_dir = os.path.join(temp_dir, os.path.splitext(os.path.basename(shp_path))[0])\n", 114 | " 
os.makedirs(temp_shp_dir)\n", 115 | " for ext in ['.shp', '.dbf', '.cpg', '.prj', '.sbn', '.sbx', '.shp.xml', '.shx']:\n", 116 | " if os.path.exists(os.path.splitext(shp_path)[0] + ext):\n", 117 | " shutil.copy(os.path.splitext(shp_path)[0]+ext, temp_shp_dir)\n", 118 | " \n", 119 | " shp_zip = shutil.make_archive(temp_shp_dir, 'zip', root_dir=temp_shp_dir)\n", 120 | " \n", 121 | " # publish the shapefile zip\n", 122 | " title = os.path.splitext(os.path.basename(shp_path))[0]\n", 123 | " item = gis.content.add(data=shp_zip, item_properties={\n", 124 | " \"title\": title,\n", 125 | " 'type': 'Shapefile',\n", 126 | " \"tags\": title})\n", 127 | " # see also: https://developers.arcgis.com/rest/services-reference/feature-service.htm\n", 128 | " features_service = item.publish({\n", 129 | " \"name\": os.path.splitext(item['name'])[0],\n", 130 | " \"hasStaticData\": True,\n", 131 | " \"layerInfo\": {\"capabilities\": \"Query\"}\n", 132 | " })\n", 133 | " \n", 134 | " # move the newly uploaded item to the folder created earlier\n", 135 | " item.move(folder)\n", 136 | " features_service.move(folder)\n", 137 | " \n", 138 | " print(\"{0}/{1}\".format(i + 1, len(shp_paths)))\n" 139 | ] 140 | } 141 | ], 142 | "metadata": { 143 | "kernelspec": { 144 | "display_name": "Python 3", 145 | "language": "python", 146 | "name": "python3" 147 | }, 148 | "language_info": { 149 | "codemirror_mode": { 150 | "name": "ipython", 151 | "version": 3 152 | }, 153 | "file_extension": ".py", 154 | "mimetype": "text/x-python", 155 | "name": "python", 156 | "nbconvert_exporter": "python", 157 | "pygments_lexer": "ipython3", 158 | "version": "3.7.4" 159 | } 160 | }, 161 | "nbformat": 4, 162 | "nbformat_minor": 4 163 | } 164 | -------------------------------------------------------------------------------- /feature_layers/csv_upload.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | 
"source": [ 7 | "# Bulk CSV Upload\n", 8 | "A notebook to read csvs and upload their data as [hosted feature layers](https://doc.arcgis.com/en/arcgis-online/share-maps/hosted-web-layers.htm) in ArcGIS" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "# common imports\n", 18 | "import os\n", 19 | "import pandas as pd\n", 20 | "from arcgis.gis import GIS" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "***Note**, if you are unable to import local `partnerutils`, **copy the following functions** from [`feature_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/feature_utils.py)" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 2, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "from partnerutils.feature_utils import sdf_from_xyz" 37 | ] 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "metadata": {}, 42 | "source": [ 43 | "## User Input\n", 44 | "\n", 45 | "* **gis**: your GIS instance, parameter information [here](https://developers.arcgis.com/python/guide/using-the-gis/)\n", 46 | "* **dir_path**: path to directory with the CSVs\n", 47 | "* **coord_fields**: map Point fields to CSV columns" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 4, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "name": "stdin", 57 | "output_type": "stream", 58 | "text": [ 59 | "Enter password: ········\n" 60 | ] 61 | } 62 | ], 63 | "source": [ 64 | "# this will prompt for a password\n", 65 | "# can also do GIS(\"https://www.arcgis.com\", \"\", \"\")\n", 66 | "gis = GIS(username=\"mpayson_startups\")" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 5, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "dir_path = \"../sample_data\"\n", 76 | "coord_fields = {\n", 77 | " \"x\": \"Longitude\",\n", 78 | " \"y\": \"Latitude\",\n", 79 
| " #\"z\": \"\"\n", 80 | "}" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "metadata": {}, 86 | "source": [ 87 | "## Execution" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": 6, 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [ 96 | "# create a new folder in the GIS to store the layers\n", 97 | "folder = os.path.basename(dir_path)\n", 98 | "gis_folder = gis.content.create_folder(folder)\n", 99 | "\n", 100 | "# handle folder already exists\n", 101 | "gis_folder = folder if gis_folder is None else gis_folder" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 7, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "name": "stdout", 111 | "output_type": "stream", 112 | "text": [ 113 | "Pushing 1 csvs\n" 114 | ] 115 | } 116 | ], 117 | "source": [ 118 | "# get an array of all the csvs in the directory\n", 119 | "csvs = [file for file in os.listdir(dir_path) if file.endswith('.csv')]\n", 120 | "csv_paths = [os.path.join(dir_path, csv) for csv in csvs]\n", 121 | "n_paths = len(csv_paths)\n", 122 | "print(\"Pushing {0} csvs\".format(n_paths))" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 8, 128 | "metadata": {}, 129 | "outputs": [ 130 | { 131 | "name": "stdout", 132 | "output_type": "stream", 133 | "text": [ 134 | "1/1\n" 135 | ] 136 | } 137 | ], 138 | "source": [ 139 | "# loop through csvs, build spatial dataframe, upload layers to GIS, move to new folder\n", 140 | "z_field = coord_fields['z'] if 'z' in coord_fields else None\n", 141 | "items = []\n", 142 | "for i, csv_path in enumerate(csv_paths):\n", 143 | " df = pd.read_csv(csv_path)\n", 144 | " sdf = sdf_from_xyz(df, coord_fields['x'], coord_fields['y'], z_field)\n", 145 | " \n", 146 | " title = os.path.splitext(os.path.basename(csv_path))[0]\n", 147 | " lyr_item = gis.content.import_data(sdf, title=title, tags=\"esri_partner_tools_sample\")\n", 148 | " lyr_item.move(gis_folder)\n", 149 | " \n", 150 | " 
items.append(lyr_item)\n", 151 | " print(\"{0}/{1}\".format(i + 1, n_paths))" 152 | ] 153 | }, 154 | { 155 | "cell_type": "markdown", 156 | "metadata": {}, 157 | "source": [ 158 | "## Clean Up" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": 15, 164 | "metadata": {}, 165 | "outputs": [ 166 | { 167 | "data": { 168 | "text/plain": [ 169 | "True" 170 | ] 171 | }, 172 | "execution_count": 15, 173 | "metadata": {}, 174 | "output_type": "execute_result" 175 | } 176 | ], 177 | "source": [ 178 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 179 | "gis.content.delete_items(delete_items)\n", 180 | "gis.content.delete_folder(gis_folder['title'])" 181 | ] 182 | } 183 | ], 184 | "metadata": { 185 | "kernelspec": { 186 | "display_name": "Python 3", 187 | "language": "python", 188 | "name": "python3" 189 | }, 190 | "language_info": { 191 | "codemirror_mode": { 192 | "name": "ipython", 193 | "version": 3 194 | }, 195 | "file_extension": ".py", 196 | "mimetype": "text/x-python", 197 | "name": "python", 198 | "nbconvert_exporter": "python", 199 | "pygments_lexer": "ipython3", 200 | "version": "3.7.3" 201 | } 202 | }, 203 | "nbformat": 4, 204 | "nbformat_minor": 4 205 | } 206 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Esri Partner Tools 2 | 3 | > Useful tools for Esri Partners built with the [ArcGIS API for Python](https://developers.arcgis.com/python/) 4 | 5 |
6 | Contents 7 | 8 | 9 | * [About](#about) 10 | * [Prerequisites](#prerequisites) 11 | * [Contents](#contents) 12 | * [Getting Started](#getting-started) 13 | * [Sample Data](#sample-data) 14 | * [Issues and Contributing](#issues-and-contributing) 15 | 16 |
17 | 18 | ## About 19 | 20 | Partners working with Esri and ArcGIS implement many common workflows. The [ArcGIS API for Python](https://developers.arcgis.com/python/) is an awesome automation library. This repo is meant to be a collection of POC scripts to automate some of these workflows. 21 | 22 | While much of the code is in Jupyter Notebooks, it can easily be ported to pure python to run on the server or as headless apps. [`partnerutils/`](/partnerutils) can also be installed as a local package: 23 | > `$ pip install -q -U git+https://github.com/mpayson/esri-partner-tools` 24 | 25 | ## Prerequisites 26 | 27 | * Install the [ArcGIS API for Python](https://developers.arcgis.com/python/) ([instructions](https://developers.arcgis.com/python/guide/install-and-set-up/)) 28 | * Access to [Jupyter Notebooks](http://jupyter.org/) 29 | 30 | ## Contents 31 | 32 | * **[`partnerutils/`](/partnerutils) - Functions that I've found helpful** 33 | * [`cool_utils.py`](/partnerutils/cool_utils.py) - functions I want to remember and hopefully you will too! 
34 | * [`etl_utils.py`](/partnerutils/etl_utils.py) - assist with common ETL logic 35 | * [`user_utils.py`](/partnerutils/user_utils.py) - assist with adding users 36 | * [`clone_utils.py`](/partnerutils/clone_utils.py) - assist with cloning groups & items 37 | * [`feature_utils.py`](partnerutils/feature_utils.py) - assist with features and feature data types 38 | * **[`common_workflows/`](/common_workflows) - Common workflows with the Python API** 39 | * [`csv_geocode.ipynb`](/common_workflows/csv_geocode.ipynb) - [geocode](https://developers.arcgis.com/features/geocoding/) rows in `csvs` and `dataframes` 40 | * [`vector_data_products.ipynb`](/common_workflows/vector_data_products.ipynb) - end-to-end workflows for managing vector content and derivative information products 41 | * [`distribute_items.ipynb`](/common_workflows/distribute_items.ipynb) - common patterns for distributing items to another organization 42 | * [`standard_geography.ipynb`](/common_workflows/standard_geography.ipynb) - enrich [standard geography](https://developers.arcgis.com/rest/geoenrichment/api-reference/standard-geography-query.htm) ids, such as `census blocks`, with geometries 43 | * **[`feature_layers/`](/feature_layers) - Common operations with [hosted feature layers](https://doc.arcgis.com/en/arcgis-online/share-maps/hosted-web-layers.htm)** 44 | * [`csv_upload.ipynb`](/feature_layers/csv_upload.ipynb) - upload a folder of `csvs` & `dataframes` 45 | * [`shapefile_upload.ipynb`](/feature_layers/shapefile_upload.ipynb) - upload a folder of `Shapefiles` 46 | * [`geojson_upload.ipynb`](/feature_layers/geojson_upload.ipynb) - upload a geojson file 47 | * [`update_data.ipynb`](/feature_layers/update_data.ipynb) - a couple different workflows for updating uploaded / hosted data 48 | * [`create_views.ipynb`](/feature_layers/create_views.ipynb) - create database views with separate permissions against one authoritative layer 49 | * [`manage_fields.ipynb`](/feature_layers/manage_fields.ipynb) 
- view and edit fields 50 | * [`manage_indexes.ipynb`](/feature_layers/manage_indexes.ipynb) - view, edit, and refresh indexes 51 | * [`enable_time.ipynb`](/feature_layers/enable_time.ipynb) - add time metadata that will be reflected in ArcGIS app UIs 52 | * **[`build_org/`](/build_org) - Automate new ArcGIS Online deployments** 53 | * [`clone_groups.ipynb`](/build_org/clone_groups.ipynb) - clone groups and their items 54 | * [`configure_org.ipynb`](/build_org/configure_org.ipynb) - customize org UI, create groups, & add users 55 | * [`create_share_group.ipynb`](/build_org/create_share_group.ipynb) - create a [group](https://doc.arcgis.com/en/arcgis-online/share-maps/groups.htm) and invite members to share content with your users 56 | * [`register_application.ipynb`](/build_org/register_application.ipynb) automatically create and [register an app](https://developers.arcgis.com/documentation/core-concepts/security-and-authentication/signing-in-arcgis-online-users/) 57 | 58 | ## Getting Started 59 | 60 | Many samples use [`partnerutils`](/partnerutils). To use this package, either copy & paste the functions as specified in each notebook OR: 61 | 62 | `$ pip install -q -U git+https://github.com/mpayson/esri-partner-tools` 63 | 64 | This will install the `partnerutils` as a local package in your active environment. The utilities can then be used as follows 65 | 66 | ```python 67 | from arcgis.gis import GIS 68 | from partnerutils.processing_utils import batch_geocode_memo 69 | 70 | gis = GIS(username="username", password="password") 71 | addresses = ['El Burrito Redlands CA', '380 New York St Redlands CA'] 72 | results = batch_geocode_memo(addresses) 73 | 74 | print(results) 75 | ``` 76 | 77 | Shout out to Ryan @ SafeGraph for showing me this is [a thing](https://github.com/SafeGraphInc/safegraph_py). Otherwise, the notebooks should give enough detail to get started. If not, **[holler](https://github.com/mpayson/esri-partner-tools/issues)**! 
78 | 79 | ## Sample Data 80 | 81 | I included some sample data for testing and trialing: 82 | * [`NYC_Restaurant_Inspections.csv`](/sample_data/NYC_Restaurant_Inspections.csv) - a slice of DOHMH New York City Restaurant Inspection Results. [Source](https://data.cityofnewyork.us/Health/DOHMH-New-York-City-Restaurant-Inspection-Results/43nn-pn8j]) 83 | * [`sample_census_tract_geoid.csv`](/sample_data/sample_census_tract_geoid.csv) - a couple census tract geoids. Copied from [here](https://geo.nyu.edu/catalog/nyu-2451-34513) 84 | 85 | ## Issues and Contributing 86 | 87 | Want to request a new sample? Have a question? Would [__love__](https://github.com/mpayson/esri-partner-tools/issues) to hear from you. 88 | 89 | And PRs always welcome! 90 | -------------------------------------------------------------------------------- /feature_layers/manage_fields.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Manage Fields\n", 8 | "[Feature Layer](http://esri.github.io/arcgis-python-api/apidoc/html/arcgis.features.toc.html#featurelayer) || [the Manager](http://esri.github.io/arcgis-python-api/apidoc/html/arcgis.features.managers.html?highlight=manager#featurelayermanager) || [About field types](http://pro.arcgis.com/en/pro-app/tool-reference/data-management/add-field.htm) || [Rest endpoint](http://resources.arcgis.com/en/help/arcgis-rest-api/#/Add_to_Definition_Feature_Layer/02r300000228000000/)" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": null, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "from arcgis.gis import GIS\n", 18 | "from arcgis.features import FeatureLayer" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": null, 24 | "metadata": {}, 25 | "outputs": [], 26 | "source": [ 27 | "gis = GIS(username=\"mpayson_startups\")" 28 | ] 29 | }, 30 | { 31 | "cell_type": 
"markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "### Initialization" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "# create the feature layer in ArcGIS Online from a geojson file\n", 44 | "item = gis.content.add({\n", 45 | " \"type\": \"GeoJson\",\n", 46 | " \"title\": \"My GeoJSON\",\n", 47 | " \"tags\": \"esri_partner_tools_sample\"\n", 48 | "}, data='../sample_data/NYC_Restaurant_Inspections.geojson')\n", 49 | "lyr_item = item.publish()\n", 50 | "\n", 51 | "# define the feature layer\n", 52 | "lyr = FeatureLayer.fromitem(lyr_item)\n", 53 | "lyr" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "## Existing fields" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [ 69 | "lyr.properties.fields" 70 | ] 71 | }, 72 | { 73 | "cell_type": "markdown", 74 | "metadata": {}, 75 | "source": [ 76 | "## Change field display name" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "metadata": {}, 83 | "outputs": [], 84 | "source": [ 85 | "update_dict = {\"fields\": dict(lyr.properties)['fields']} " 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [ 94 | "names_to_change = {\"PHONE\": \"Phone Number\"}" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "metadata": {}, 101 | "outputs": [], 102 | "source": [ 103 | "for val in update_dict['fields']:\n", 104 | " if val['name'] in names_to_change:\n", 105 | " val['alias'] = names_to_change[val['name']]" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": {}, 112 | "outputs": [], 113 | "source": [ 114 | "lyr.manager.update_definition(update_dict)" 115 | ] 116 | }, 117 | { 118 | "cell_type": "markdown", 119 | 
"metadata": {}, 120 | "source": [ 121 | "## Change field description" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "update_dict = {\"fields\": dict(lyr.properties)['fields']} " 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "desc_to_change = {\"PHONE\": \"This field represents phone numbers\"}\n", 140 | "for val in update_dict['fields']:\n", 141 | " if val['name'] in desc_to_change:\n", 142 | " val['']" 143 | ] 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "metadata": {}, 148 | "source": [ 149 | "## Add field" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": null, 155 | "metadata": {}, 156 | "outputs": [], 157 | "source": [ 158 | "new_field = {\n", 159 | " \"name\": \"TEST_STR_FIELD\",\n", 160 | " \"type\": \"esriFieldTypeString\",\n", 161 | " \"alias\": \"TEST_STR_FIELD\",\n", 162 | " \"length\": 256,\n", 163 | " \"nullable\": True,\n", 164 | " \"editable\": True,\n", 165 | " \"visible\": True,\n", 166 | " \"domain\": None\n", 167 | "}\n", 168 | "update_dict = {\"fields\": [new_field]}" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": {}, 175 | "outputs": [], 176 | "source": [ 177 | "lyr.manager.add_to_definition(update_dict)" 178 | ] 179 | }, 180 | { 181 | "cell_type": "markdown", 182 | "metadata": {}, 183 | "source": [ 184 | "## Clean Up" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": null, 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 194 | "gis.content.delete_items(delete_items)" 195 | ] 196 | }, 197 | { 198 | "cell_type": "code", 199 | "execution_count": null, 200 | "metadata": {}, 201 | "outputs": [], 202 | "source": [] 203 | } 204 | ], 205 | 
"metadata": { 206 | "kernelspec": { 207 | "display_name": "Python 3", 208 | "language": "python", 209 | "name": "python3" 210 | }, 211 | "language_info": { 212 | "codemirror_mode": { 213 | "name": "ipython", 214 | "version": 3 215 | }, 216 | "file_extension": ".py", 217 | "mimetype": "text/x-python", 218 | "name": "python", 219 | "nbconvert_exporter": "python", 220 | "pygments_lexer": "ipython3", 221 | "version": "3.8.5" 222 | } 223 | }, 224 | "nbformat": 4, 225 | "nbformat_minor": 4 226 | } 227 | -------------------------------------------------------------------------------- /common_workflows/csv_geocode.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Bulk CSV Geocode\n", 8 | "*A notebook to [geocode](https://developers.arcgis.com/features/geocoding/) rows in a `csv`*" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": null, 14 | "metadata": { 15 | "collapsed": true, 16 | "jupyter": { 17 | "outputs_hidden": true 18 | } 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "# common imports\n", 23 | "import csv\n", 24 | "from arcgis.gis import GIS\n", 25 | "from arcgis.geocoding import batch_geocode" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "***Note**, if you are unable to import local `partnerutils`, **copy the following functions** from [`cool_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/cool_utils.py)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": { 39 | "collapsed": true, 40 | "jupyter": { 41 | "outputs_hidden": true 42 | } 43 | }, 44 | "outputs": [], 45 | "source": [ 46 | "from partnerutils.cool_utils import chunk" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "## User Input\n", 54 | "* **GIS**: Your GIS instance, parameter 
information [here](https://developers.arcgis.com/python/guide/using-the-gis/). Not specifying a password creates a password prompt" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": { 61 | "collapsed": true, 62 | "jupyter": { 63 | "outputs_hidden": true 64 | } 65 | }, 66 | "outputs": [], 67 | "source": [ 68 | "gis = GIS(username=\"mpayson_startups\")" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "metadata": {}, 74 | "source": [ 75 | "* **csv_path**: path to the `csv` to be geocoded\n", 76 | "* **out_path**: path to the output `csv`\n", 77 | "* **address_fields**: map geocode request fields to csv fields, can also be a SingleLine field string--more [here](https://developers.arcgis.com/rest/geocode/api-reference/geocoding-geocode-addresses.htm) " 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": { 84 | "collapsed": true, 85 | "jupyter": { 86 | "outputs_hidden": true 87 | } 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "csv_path = \"NYC Inspection test.csv\"\n", 92 | "out_path = \"Geocode Results.csv\"\n", 93 | "address_fields = {\n", 94 | " \"Address\": \"ADDRESS\",\n", 95 | " \"City\": \"CITY\",\n", 96 | " \"Region\": \"STATE\",\n", 97 | " \"Postal\": \"ZIPCODE\"\n", 98 | "}" 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "metadata": {}, 104 | "source": [ 105 | "## Functions" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": { 112 | "collapsed": true, 113 | "jupyter": { 114 | "outputs_hidden": true 115 | } 116 | }, 117 | "outputs": [], 118 | "source": [ 119 | "# translate csv fields to geocode request fields\n", 120 | "def format_request_address(row, address_fields):\n", 121 | " if type(address_fields) == str:\n", 122 | " return row[address_fields]\n", 123 | " return {field: row[address_fields[field]] for field in address_fields}\n", 124 | "\n", 125 | "# get output location dictionary from geocode 
result\n", 126 | "def get_location(res):\n", 127 | " if res:\n", 128 | " return {\n", 129 | " \"x\": res['location']['x'],\n", 130 | " \"y\": res['location']['y'],\n", 131 | " \"score\": res['score']\n", 132 | " }\n", 133 | " return {}" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "metadata": {}, 139 | "source": [ 140 | "## Execution" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": null, 146 | "metadata": { 147 | "collapsed": true, 148 | "jupyter": { 149 | "outputs_hidden": true 150 | } 151 | }, 152 | "outputs": [], 153 | "source": [ 154 | "# read the csv\n", 155 | "with open(csv_path, 'r') as data_csv:\n", 156 | " rows = csv.DictReader(data_csv)\n", 157 | " data = [row for row in rows]" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "metadata": { 164 | "collapsed": true, 165 | "jupyter": { 166 | "outputs_hidden": true 167 | } 168 | }, 169 | "outputs": [], 170 | "source": [ 171 | "output = []\n", 172 | "\n", 173 | "# iterate through chunks, format the address requests, geocode, merge location results with chunk data\n", 174 | "for c in chunk(data):\n", 175 | " adrs = [format_request_address(r, address_fields) for r in c]\n", 176 | " results = batch_geocode(addresses=adrs)\n", 177 | " chunk_out = [{**r, **get_location(results[i])} for i, r in enumerate(c)]\n", 178 | " output += chunk_out" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": null, 184 | "metadata": { 185 | "collapsed": true, 186 | "jupyter": { 187 | "outputs_hidden": true 188 | } 189 | }, 190 | "outputs": [], 191 | "source": [ 192 | "# write output\n", 193 | "with open(out_path, 'w') as csvfile:\n", 194 | " writer = csv.DictWriter(csvfile, fieldnames=output[0].keys())\n", 195 | " writer.writeheader()\n", 196 | " for row in output:\n", 197 | " writer.writerow(row)" 198 | ] 199 | } 200 | ], 201 | "metadata": { 202 | "kernelspec": { 203 | "display_name": "Python 3", 204 | "language": "python", 
"""********************************************
* A couple ETL utility functions for working with ArcGIS
********************************************"""
import tempfile
import json
import datetime

DEFAULT_TITLE = 'GeoJSON Utils POC'
DEFAULT_TAG = 'geojson-utils-poc'

def date_to_ags(date):
    """Returns an ArcGIS-formatted date string ('MM/DD/YYYY HH:MM:SS', UTC)
    from a Python datetime object

    args:
        date -- Python datetime object; naive datetimes are treated as local time
                by astimezone before conversion to UTC"""
    tz = datetime.timezone.utc
    return date.astimezone(tz).strftime('%m/%d/%Y %H:%M:%S')

def timestamp_to_ags(timestamp):
    """Returns an ArcGIS-formatted date from a millisecond epoch timestamp

    args:
        timestamp -- timestamp in milliseconds since epoch"""
    seconds = timestamp / 1000
    date = datetime.datetime.fromtimestamp(seconds)
    return date_to_ags(date)

def _add_unique_index(layer, field):
    """Adds a unique index so upsert operations can update, rather than
    duplicate, existing rows; returns the add_to_definition result

    args:
        layer -- FeatureLayer where the index is added
        field -- the field to index (must hold unique values)
    """
    new_index = {
        "name": "External UID",
        "fields": field,
        "isUnique": True,
        "description": "External UID for upsert operations"
    }
    add_dict = {"indexes": [new_index]}
    return layer.manager.add_to_definition(add_dict)

def add_geojson(gis, geojson, **item_options):
    """Uploads geojson and returns the file item

    args:
        gis -- gis object where item is added
        geojson -- geojson object to upload as file
        item_options -- additional item properties, see here:
            https://developers.arcgis.com/python/api-reference/arcgis.gis.toc.html#arcgis.gis.ContentManager.add"""

    # pop defaults so explicit options in item_options win
    title = item_options.pop('title', DEFAULT_TITLE)
    tags = item_options.pop('tags', DEFAULT_TAG)

    # save geojson to tempfile and add as item
    with tempfile.NamedTemporaryFile(mode="w", suffix='.json') as fp:
        fp.write(json.dumps(geojson))
        # flush so the buffered JSON is on disk before content.add re-reads
        # the file by name (previously the payload could be partially written)
        fp.flush()
        # NOTE(review): re-opening an open NamedTemporaryFile by name is not
        # supported on Windows -- confirm target platform or use delete=False
        item = gis.content.add({
            **item_options,
            'type': 'GeoJson',
            'title': title,
            'tags': tags,
        }, data=fp.name)

    return item

def append_to_layer(gis, layer, geojson, uid_field=None):
    """Appends geojson to an existing service and returns the append results

    Note, this is the best approach for bulk updates in ArcGIS Online.
    There are other options here, such as transactional edits
    > https://github.com/mpayson/esri-partner-tools/blob/master/feature_layers/update_data.ipynb

    args:
        gis -- gis object where the layers live
        layer -- FeatureLayer to be updated
        geojson -- geojson object to add to the layer
        uid_field -- identifies existing features to update with new features (must be uniquely indexed)
    """

    # the staged file item is temporary, deleted again in the finally below
    item = add_geojson(gis, geojson, title="Data update")
    result = None

    try:
        # if there's a uid_field make sure it's indexed before append
        indexes = layer.properties.indexes
        if uid_field and not any(i['fields'] == uid_field for i in indexes):
            _add_unique_index(layer, uid_field)

        result = layer.append(
            item_id=item.id,
            upload_format="geojson",
            upsert=uid_field is not None,
            upsert_matching_field=uid_field  # update existing features with matching uid_fields
        )
    finally:
        item.delete()  # if not deleted next run will error and pollute ArcGIS

    return result

def create_layer(gis, geojson, template_item):
    """Publishes geojson as a hosted service based on an existing template item
    and returns the resulting layer item

    args:
        gis -- gis where the layer should live
        geojson -- initial geojson to populate the layer
        template_item -- existing Item that has been pre-configured with desired properties"""

    # clone the template (schema/properties only), then load data via append
    results = gis.content.clone_items([template_item], copy_data=False, search_existing_items=False)
    item = results[0]
    lyr = item.layers[0]

    append_to_layer(gis, lyr, geojson)

    return item

def create_scratch_layer(gis, geojson, uid_field=None, **item_options):
    """Publishes geojson as a hosted service and returns the layer item

    Note, use this to quickly add geojson with system default properties. In production,
    it's easier to set desired properties on a template layer then use create_layer.

    args:
        gis -- gis where the layer should live
        geojson -- initial geojson to populate the layer
        uid_field -- global uid field that can be used to determine existing features on updates
        item_options -- additional item properties, see here:
            https://developers.arcgis.com/python/api-reference/arcgis.gis.toc.html#arcgis.gis.ContentManager.add"""

    item = add_geojson(gis, geojson, **item_options)
    try:
        lyr_item = item.publish()
    finally:
        # the staged GeoJson file item is no longer needed once published
        item.delete()

    # add a unique index for upsert operations so don't duplicate rows
    if uid_field:
        lyr = lyr_item.layers[0]
        _add_unique_index(lyr, uid_field)

    return lyr_item

def get_existing_item(gis, tags=None):
    """Searches for an existing layer item and returns it (or None)

    Note, for now this just assumes there's just one layer item for the tags

    args:
        gis -- gis to search
        tags -- tags to search for layers within the gis"""
    t = tags if tags else DEFAULT_TAG
    search_items = gis.content.search('tags:"{0}" AND type:"Feature Service"'.format(t))

    return search_items[0] if search_items else None

def delete_before(lyr, date, field):
    """Deletes all features in a layer before a given date

    args:
        lyr -- the feature layer with features to delete
        date -- the date before which to delete features
        field -- the date field"""
    where = "{0} < '{1}'".format(field, date_to_ags(date))
    return lyr.delete_features(where=where)

def delete_before_days(lyr, number_days, field):
    """Deletes all features with dates before the specified
    number of days back from today

    args:
        lyr -- the feature layer with features to delete
        number_days -- the number of days back before which to delete features
        field -- the date field
    """
    dt = datetime.datetime.today() - datetime.timedelta(days=number_days)
    return delete_before(lyr, dt, field)
"data": { 81 | "text/html": [ 82 | "
\n", 83 | "
\n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | "
\n", 88 | "\n", 89 | "
\n", 90 | " My GeoJSON\n", 91 | " \n", 92 | "
GeoJson by mpayson_geodev\n", 93 | "
Last Modified: July 06, 2020\n", 94 | "
0 comments, 0 views\n", 95 | "
\n", 96 | "
\n", 97 | " " 98 | ], 99 | "text/plain": [ 100 | "" 101 | ] 102 | }, 103 | "execution_count": 5, 104 | "metadata": {}, 105 | "output_type": "execute_result" 106 | } 107 | ], 108 | "source": [ 109 | "# create the item, this will just add the file to ArcGIS Online\n", 110 | "item = gis.content.add({\n", 111 | " \"type\": \"GeoJson\",\n", 112 | " \"title\": \"My GeoJSON\",\n", 113 | " \"tags\": \"esri_partner_tools_sample\"\n", 114 | "}, data='../sample_data/NYC_Restaurant_Inspections.geojson')\n", 115 | "item" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 6, 121 | "metadata": {}, 122 | "outputs": [ 123 | { 124 | "data": { 125 | "text/html": [ 126 | "
\n", 127 | "
\n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | "
\n", 132 | "\n", 133 | "
\n", 134 | " My GeoJSON\n", 135 | " \n", 136 | "
Feature Layer Collection by mpayson_geodev\n", 137 | "
Last Modified: July 06, 2020\n", 138 | "
0 comments, 0 views\n", 139 | "
\n", 140 | "
\n", 141 | " " 142 | ], 143 | "text/plain": [ 144 | "" 145 | ] 146 | }, 147 | "execution_count": 6, 148 | "metadata": {}, 149 | "output_type": "execute_result" 150 | } 151 | ], 152 | "source": [ 153 | "# publish the item, this will make the item available as a hosted service\n", 154 | "lyr_item = item.publish()\n", 155 | "lyr_item" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "## Clean Up" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 7, 168 | "metadata": {}, 169 | "outputs": [ 170 | { 171 | "data": { 172 | "text/plain": [ 173 | "True" 174 | ] 175 | }, 176 | "execution_count": 7, 177 | "metadata": {}, 178 | "output_type": "execute_result" 179 | } 180 | ], 181 | "source": [ 182 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 183 | "gis.content.delete_items(delete_items)" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [] 192 | } 193 | ], 194 | "metadata": { 195 | "kernelspec": { 196 | "display_name": "Python 3", 197 | "language": "python", 198 | "name": "python3" 199 | }, 200 | "language_info": { 201 | "codemirror_mode": { 202 | "name": "ipython", 203 | "version": 3 204 | }, 205 | "file_extension": ".py", 206 | "mimetype": "text/x-python", 207 | "name": "python", 208 | "nbconvert_exporter": "python", 209 | "pygments_lexer": "ipython3", 210 | "version": "3.8.5" 211 | } 212 | }, 213 | "nbformat": 4, 214 | "nbformat_minor": 4 215 | } 216 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | **NOTE, THIS IS NOT MEANT TO BE PUBLISHED 3 | 4 | ADAPTED FROM: A setuptools based setup module. 
5 | See: 6 | https://packaging.python.org/en/latest/distributing.html 7 | https://github.com/pypa/sampleproject 8 | """ 9 | 10 | # Always prefer setuptools over distutils 11 | from setuptools import setup 12 | # To use a consistent encoding 13 | from codecs import open 14 | from os import path 15 | 16 | here = path.abspath(path.dirname(__file__)) 17 | 18 | # Get the long description from the README file 19 | with open(path.join(here, 'README.md'), encoding='utf-8') as f: 20 | long_description = f.read() 21 | 22 | # Arguments marked as "Required" below must be included for upload to PyPI. 23 | # Fields marked as "Optional" may be commented out. 24 | 25 | setup( 26 | # This is the name of your project. The first time you publish this 27 | # package, this name will be registered for you. It will determine how 28 | # users can install this project, e.g.: 29 | # 30 | # $ pip install sampleproject 31 | # 32 | # And where it will live on PyPI: https://pypi.org/project/sampleproject/ 33 | # 34 | # There are some restrictions on what makes a valid project name 35 | # specification here: 36 | # https://packaging.python.org/specifications/core-metadata/#name 37 | name='partnerutils', # Required 38 | 39 | # Versions should comply with PEP 440: 40 | # https://www.python.org/dev/peps/pep-0440/ 41 | # 42 | # For a discussion on single-sourcing the version across setup.py and the 43 | # project code, see 44 | # https://packaging.python.org/en/latest/single_source_version.html 45 | version='0.0.1', # Required 46 | 47 | # This is a one-line description or tagline of what your project does. This 48 | # corresponds to the "Summary" metadata field: 49 | # https://packaging.python.org/specifications/core-metadata/#summary 50 | description='Partner utils for ArcGIS', # Required 51 | 52 | # This is an optional longer description of your project that represents 53 | # the body of text which users will see when they visit PyPI. 
54 | # 55 | # Often, this is the same as your README, so you can just read it in from 56 | # that file directly (as we have already done above) 57 | # 58 | # This field corresponds to the "Description" metadata field: 59 | # https://packaging.python.org/specifications/core-metadata/#description-optional 60 | long_description=long_description, # Optional 61 | 62 | # Denotes that our long_description is in Markdown; valid values are 63 | # text/plain, text/x-rst, and text/markdown 64 | # 65 | # Optional if long_description is written in reStructuredText (rst) but 66 | # required for plain-text or Markdown; if unspecified, "applications should 67 | # attempt to render [the long_description] as text/x-rst; charset=UTF-8 and 68 | # fall back to text/plain if it is not valid rst" (see link below) 69 | # 70 | # This field corresponds to the "Description-Content-Type" metadata field: 71 | # https://packaging.python.org/specifications/core-metadata/#description-content-type-optional 72 | long_description_content_type='text/markdown', # Optional (see note above) 73 | 74 | # This should be a valid link to your project's main homepage. 75 | # 76 | # This field corresponds to the "Home-Page" metadata field: 77 | # https://packaging.python.org/specifications/core-metadata/#home-page-optional 78 | url='https://github.com/mpayson/partner-python-tools', # Optional 79 | 80 | # This should be your name or the name of the organization which owns the 81 | # project. 82 | author='Max Payson', # Optional 83 | 84 | # This should be a valid email address corresponding to the author listed 85 | # above. 86 | author_email='mpayson@esri.com', # Optional 87 | 88 | # Classifiers help users find your project by categorizing it. 89 | # 90 | # For a list of valid classifiers, see https://pypi.org/classifiers/ 91 | # classifiers=[ # Optional 92 | # # How mature is this project? 
Common values are 93 | # # 3 - Alpha 94 | # # 4 - Beta 95 | # # 5 - Production/Stable 96 | # 'Development Status :: 3 - Alpha', 97 | 98 | # # Indicate who your project is intended for 99 | # 'Intended Audience :: Developers', 100 | # 'Topic :: Software Development :: Build Tools', 101 | 102 | # # Pick your license as you wish 103 | # 'License :: OSI Approved :: MIT License', 104 | 105 | # # Specify the Python versions you support here. In particular, ensure 106 | # # that you indicate whether you support Python 2, Python 3 or both. 107 | # 'Programming Language :: Python :: 2', 108 | # 'Programming Language :: Python :: 2.7', 109 | # 'Programming Language :: Python :: 3', 110 | # 'Programming Language :: Python :: 3.4', 111 | # 'Programming Language :: Python :: 3.5', 112 | # 'Programming Language :: Python :: 3.6', 113 | # ], 114 | 115 | # This field adds keywords for your project which will appear on the 116 | # project page. What does your project relate to? 117 | # 118 | # Note that this is a string of words separated by whitespace, not a list. 119 | keywords='esri arcgis startups partners gis webgis development', # Optional 120 | 121 | # You can just specify package directories manually here if your project is 122 | # simple. Or you can use find_packages(). 123 | # 124 | # Alternatively, if you just want to distribute a single Python file, use 125 | # the `py_modules` argument instead as follows, which will expect a file 126 | # called `my_module.py` to exist: 127 | # 128 | # py_modules=["my_module"], 129 | # 130 | packages=['partnerutils'], # Required 131 | 132 | # This field lists other packages that your project depends on to run. 133 | # Any package you put here will be installed by pip when your project is 134 | # installed, so they must be valid existing projects. 
135 | # 136 | # For an analysis of "install_requires" vs pip's requirements files see: 137 | # https://packaging.python.org/en/latest/requirements.html 138 | install_requires=['arcgis', 'pandas'], # Optional 139 | 140 | # List additional groups of dependencies here (e.g. development 141 | # dependencies). Users will be able to install these using the "extras" 142 | # syntax, for example: 143 | # 144 | # $ pip install sampleproject[dev] 145 | # 146 | # Similar to `install_requires` above, these must be valid existing 147 | # projects. 148 | # extras_require={ # Optional 149 | # 'dev': ['check-manifest'], 150 | # 'test': ['coverage'], 151 | # }, 152 | 153 | # If there are data files included in your packages that need to be 154 | # installed, specify them here. 155 | # 156 | # If using Python 2.6 or earlier, then these have to be included in 157 | # MANIFEST.in as well. 158 | # package_data={ # Optional 159 | # 'sample': ['package_data.dat'], 160 | # }, 161 | 162 | # Although 'package_data' is the preferred approach, in some case you may 163 | # need to place data files outside of your packages. See: 164 | # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files 165 | # 166 | # In this case, 'data_file' will be installed into '/my_data' 167 | # data_files=[('my_data', ['data/data_file'])], # Optional 168 | 169 | # To provide executable scripts, use entry points in preference to the 170 | # "scripts" keyword. Entry points provide cross-platform support and allow 171 | # `pip` to create the appropriate form of executable for the target 172 | # platform. 173 | # 174 | # For example, the following would provide a command called `sample` which 175 | # executes the function `main` from this package when invoked: 176 | # entry_points={ # Optional 177 | # 'console_scripts': [ 178 | # 'sample=sample:main', 179 | # ], 180 | # }, 181 | 182 | # List additional URLs that are relevant to your project as a dict. 
183 | # 184 | # This field corresponds to the "Project-URL" metadata fields: 185 | # https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use 186 | # 187 | # Examples listed include a pattern for specifying where the package tracks 188 | # issues, where the source is hosted, where to say thanks to the package 189 | # maintainers, and where to support the project financially. The key is 190 | # what's used to render the link text on PyPI. 191 | project_urls={ # Optional 192 | 'Bug Reports': 'https://github.com/mpayson/partner-python-tools', 193 | 'Say Thanks!': 'https://github.com/mpayson/partner-python-tools', 194 | 'Source': 'https://github.com/mpayson/partner-python-tools', 195 | }, 196 | ) -------------------------------------------------------------------------------- /common_workflows/distribute_items.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Distribute Items\n", 8 | "Jupyter Notebook to show different patterns for distributing items to another organization.\n", 9 | "\n", 10 | "The patterns can include: [share items](#Share-Item) || [copy & share items](#Copy-then-Share-Item) || [clone items](#Clone-Item) || [clone and modify applications](#Clone-and-Modify-Applications) and more\n", 11 | "\n", 12 | "> ***Note** [Groups](https://doc.arcgis.com/en/arcgis-online/share-maps/groups.htm) can be used to share data so that you maintain ownership without cloning the content. To set up groups, see [configure](https://github.com/mpayson/esri-partner-tools/blob/master/build_org/configure_org.ipynb) for new users, or [create_share](https://github.com/mpayson/esri-partner-tools/blob/master/build_org/create_share_group.ipynb) for existing users." 
13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": null, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "from arcgis.gis import GIS, Item" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "***Note**, if you are unable to import local `partnerutils`, **copy the following functions** from [`clone_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/clone_utils.py)" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": null, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "from partnerutils.clone_utils import search_group_title, clone_items_modify, search_item_title" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "src = GIS(username=\"mpayson_startups\")\n", 47 | "tar = GIS(username=\"mspatialstartups\")" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "metadata": {}, 53 | "source": [ 54 | "#### Example Constants" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [ 63 | "GROUP_NAMES = ['Max Test']\n", 64 | "\n", 65 | "SHARE_ITEMID = \"90dffd24537240a59eede871ade5856a\"\n", 66 | "COPY_ITEMID = \"90dffd24537240a59eede871ade5856a\"\n", 67 | "CLONE_ITEMID = \"90dffd24537240a59eede871ade5856a\"\n", 68 | "APP_ID = \"1bbda506fdfd4c82a90fea57426f5603\"\n", 69 | "\n", 70 | "NAME_TEMPLATE = \"{} -- \" + tar.properties['urlKey']\n", 71 | "FOLDER = \"Content -- \" + tar.properties['urlKey']" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "groups = [search_group_title(target=src, title=n, outside_org=True) for n in GROUP_NAMES]\n", 81 | "groups" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "metadata": {}, 87 | "source": [ 88 | "### Modify Functions\n", 
89 | "Sometimes you don't want a 1:1 copy, these functions will modify the cloned item properties. A few notes:\n", 90 | "* `modify_item_callback` receives the cloned item and its gis. It should return a flattened dict of properties [here](https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.gis.toc.html?highlight=clone_items#arcgis.gis.Item.update)\n", 91 | "* The default behavior is to update the item to match the NAME_TEMPLATE" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": null, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "def modify_item_callback(item, target_gis):\n", 101 | " title = NAME_TEMPLATE.format(item.title)\n", 102 | " while search_item_title(target_gis, title):\n", 103 | " title = input(\"Title `{0}` for ITEM `{1}` already exists \\n new title: \"\n", 104 | " .format(title, item.title))\n", 105 | " return {\"title\": title}" 106 | ] 107 | }, 108 | { 109 | "cell_type": "markdown", 110 | "metadata": {}, 111 | "source": [ 112 | "## Execution" 113 | ] 114 | }, 115 | { 116 | "cell_type": "markdown", 117 | "metadata": {}, 118 | "source": [ 119 | "### Share Item\n", 120 | "Shares an item with an existing group. Use this pattern if you want to own and maintain single authoritative items that all of your user organizations can access." 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": null, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "share_item = Item(src, SHARE_ITEMID)" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "share_item.share(groups=groups)" 139 | ] 140 | }, 141 | { 142 | "cell_type": "markdown", 143 | "metadata": {}, 144 | "source": [ 145 | "### Copy then Share Item\n", 146 | "Copies and shares an item to an existing group. 
Use this pattern if you have a template item and you want each user organization to access a copy while **you own and maintain the copies**." 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "copy_item = Item(src, COPY_ITEMID)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": null, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "paste_items = clone_items_modify([copy_item], src,\n", 165 | " modify_item_callback=modify_item_callback,\n", 166 | " copy_data=False, search_existing_items=False,\n", 167 | " folder=FOLDER)\n", 168 | "paste_share_res = [i.share(groups=groups) for i in paste_items]\n", 169 | "paste_share_res" 170 | ] 171 | }, 172 | { 173 | "cell_type": "markdown", 174 | "metadata": {}, 175 | "source": [ 176 | "### Clone Item\n", 177 | "Clones an item to a target organization. Use this pattern if you have a template item and you want **each user organization to own their copy**." 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": null, 183 | "metadata": {}, 184 | "outputs": [], 185 | "source": [ 186 | "clone_item = Item(src, CLONE_ITEMID)" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": null, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "clones = clone_items_modify([clone_item], tar,\n", 196 | " modify_item_callback=modify_item_callback,\n", 197 | " copy_data=True, search_existing_items=False,\n", 198 | " folder=FOLDER)\n", 199 | "clones" 200 | ] 201 | }, 202 | { 203 | "cell_type": "markdown", 204 | "metadata": {}, 205 | "source": [ 206 | "### Clone and Modify Applications\n", 207 | "Clones an application (or webmap) then swizzles the dependencies. 
Use this pattern if you have a template application/map that should be configured for data that your user organizations can now access.\n", 208 | "\n", 209 | "By default, the clone_items functions will also clone dependencies. This example shows how to reference the newly shared item via a group, so that the end-organization owns the application but you maintain the underlying data." 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "metadata": {}, 216 | "outputs": [], 217 | "source": [ 218 | "app_item = Item(src, APP_ID)\n", 219 | "item_map = {copy_item.id: paste_items[0].id}\n", 220 | "item_map" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": null, 226 | "metadata": {}, 227 | "outputs": [], 228 | "source": [ 229 | "app_clones = clone_items_modify([app_item], tar,\n", 230 | " modify_item_callback=modify_item_callback,\n", 231 | " copy_data=False, search_existing_items=False,\n", 232 | " folder=FOLDER,\n", 233 | " item_mapping=item_map)\n", 234 | "app_clones" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": null, 240 | "metadata": {}, 241 | "outputs": [], 242 | "source": [] 243 | } 244 | ], 245 | "metadata": { 246 | "kernelspec": { 247 | "display_name": "Python 3", 248 | "language": "python", 249 | "name": "python3" 250 | }, 251 | "language_info": { 252 | "codemirror_mode": { 253 | "name": "ipython", 254 | "version": 3 255 | }, 256 | "file_extension": ".py", 257 | "mimetype": "text/x-python", 258 | "name": "python", 259 | "nbconvert_exporter": "python", 260 | "pygments_lexer": "ipython3", 261 | "version": "3.7.4" 262 | } 263 | }, 264 | "nbformat": 4, 265 | "nbformat_minor": 4 266 | } 267 | -------------------------------------------------------------------------------- /feature_layers/create_views.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | 
"source": [ 7 | "# Create views\n", 8 | "\n", 9 | "Create new [view items](https://doc.arcgis.com/en/arcgis-online/manage-data/create-hosted-views.htm) from feature layer items. View items are akin to database views--they can have separate permissions and capabilities as well as limited access to a subset of the underlying data. This means you can maintain a single, authoritative layer while controlling how different groups of users can access and interact with that layer." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "# common imports\n", 19 | "import os\n", 20 | "import pandas as pd\n", 21 | "from arcgis.gis import GIS\n", 22 | "from arcgis.features import FeatureLayerCollection, FeatureLayer" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "***Note**, if you are unable to import local `partnerutils`, **copy the following functions** from [`feature_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/feature_utils.py)" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": 2, 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "from partnerutils.feature_utils import sdf_from_xyz" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "metadata": {}, 44 | "source": [ 45 | "**GIS**: Your GIS instance, parameter information [here](https://developers.arcgis.com/python/guide/using-the-gis/). 
Not specifying a password creates a password prompt" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 3, 51 | "metadata": {}, 52 | "outputs": [ 53 | { 54 | "name": "stdin", 55 | "output_type": "stream", 56 | "text": [ 57 | "Enter password: ········\n" 58 | ] 59 | } 60 | ], 61 | "source": [ 62 | "gis = GIS(username=\"mpayson_startups\")" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": {}, 68 | "source": [ 69 | "### Create demo layer" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 11, 75 | "metadata": {}, 76 | "outputs": [ 77 | { 78 | "data": { 79 | "text/html": [ 80 | "
\n", 81 | "
\n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | "
\n", 86 | "\n", 87 | "
\n", 88 | " Base demo layer\n", 89 | " \n", 90 | "
Feature Layer Collection by mpayson_startups\n", 91 | "
Last Modified: November 06, 2019\n", 92 | "
0 comments, 0 views\n", 93 | "
\n", 94 | "
\n", 95 | " " 96 | ], 97 | "text/plain": [ 98 | "" 99 | ] 100 | }, 101 | "execution_count": 11, 102 | "metadata": {}, 103 | "output_type": "execute_result" 104 | } 105 | ], 106 | "source": [ 107 | "df = pd.read_csv('../sample_data/NYC_Restaurant_Inspections.csv', encoding='utf-8')\n", 108 | "sdf = sdf_from_xyz(df, 'Longitude', 'Latitude')\n", 109 | "lyr_item = sdf.spatial.to_featurelayer(\n", 110 | " 'Base demo layer',\n", 111 | " tags=\"esri_partner_tools_sample\"\n", 112 | ")\n", 113 | "lyr_item" 114 | ] 115 | }, 116 | { 117 | "cell_type": "markdown", 118 | "metadata": {}, 119 | "source": [ 120 | "## Execution" 121 | ] 122 | }, 123 | { 124 | "cell_type": "markdown", 125 | "metadata": {}, 126 | "source": [ 127 | "### Create view item - [doc](https://developers.arcgis.com/python/api-reference/arcgis.features.managers.html?highlight=create_view#arcgis.features.managers.FeatureLayerCollectionManager.create_view)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 12, 133 | "metadata": {}, 134 | "outputs": [ 135 | { 136 | "data": { 137 | "text/html": [ 138 | "
\n", 139 | "
\n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | "
\n", 144 | "\n", 145 | "
\n", 146 | " Base demo view\n", 147 | " \n", 148 | "
Feature Layer Collection by mpayson_startups\n", 149 | "
Last Modified: November 06, 2019\n", 150 | "
0 comments, 0 views\n", 151 | "
\n", 152 | "
\n", 153 | " " 154 | ], 155 | "text/plain": [ 156 | "" 157 | ] 158 | }, 159 | "execution_count": 12, 160 | "metadata": {}, 161 | "output_type": "execute_result" 162 | } 163 | ], 164 | "source": [ 165 | "# A feature service can have many layers, so a FeatureLayerCollection represents\n", 166 | "# the underlying service. Often, like through the Python API, newly created layers\n", 167 | "# create new services as well. So the desired layer is the first layer in the service.\n", 168 | "flc = FeatureLayerCollection.fromitem(lyr_item)\n", 169 | "view_item = flc.manager.create_view('Base demo view')\n", 170 | "view_item.update(item_properties={'tags': 'esri_partner_tools_sample'})\n", 171 | "view_item" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": 6, 177 | "metadata": {}, 178 | "outputs": [ 179 | { 180 | "name": "stdout", 181 | "output_type": "stream", 182 | "text": [ 183 | "# features: 5000, # fields: 27\n" 184 | ] 185 | } 186 | ], 187 | "source": [ 188 | "view_lyr = view_item.layers[0]\n", 189 | "\n", 190 | "# create a reference sdf so we can compare what data is available in the view\n", 191 | "def print_reference(ref_view_lyr):\n", 192 | " ref_sdf = ref_view_lyr.query().sdf\n", 193 | " print(\"# features: {}, # fields: {}\".format(len(ref_sdf), len(ref_sdf.columns)))\n", 194 | "\n", 195 | "print_reference(view_lyr)" 196 | ] 197 | }, 198 | { 199 | "cell_type": "markdown", 200 | "metadata": {}, 201 | "source": [ 202 | "### Update visible data in view" 203 | ] 204 | }, 205 | { 206 | "cell_type": "markdown", 207 | "metadata": {}, 208 | "source": [ 209 | "#### Fields\n", 210 | "Update visible fields--note, objectIdField must be visible" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": 7, 216 | "metadata": {}, 217 | "outputs": [ 218 | { 219 | "name": "stdout", 220 | "output_type": "stream", 221 | "text": [ 222 | "# features: 5000, # fields: 5\n" 223 | ] 224 | } 225 | ], 226 | "source": [ 227 | "keep_field_names = 
{'GRADE', 'VIOLATION_', 'SCORE'}\n", 228 | "keep_field_names.add(view_lyr.properties.objectIdField)\n", 229 | "fields = list(map(\n", 230 | " lambda f: {\n", 231 | " 'name': f.name,\n", 232 | " 'visible': True if f.name in keep_field_names else False\n", 233 | " },\n", 234 | " view_lyr.properties.fields\n", 235 | "))\n", 236 | "view_lyr.manager.update_definition({\"fields\": fields})\n", 237 | "\n", 238 | "print_reference(view_lyr)" 239 | ] 240 | }, 241 | { 242 | "cell_type": "markdown", 243 | "metadata": {}, 244 | "source": [ 245 | "#### Features by attribute\n", 246 | "Update visible rows based on attribute [SQL where clause](https://pro.arcgis.com/en/pro-app/help/mapping/navigation/sql-reference-for-elements-used-in-query-expressions.htm)" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": 8, 252 | "metadata": {}, 253 | "outputs": [ 254 | { 255 | "name": "stdout", 256 | "output_type": "stream", 257 | "text": [ 258 | "# features: 3955, # fields: 5\n" 259 | ] 260 | } 261 | ], 262 | "source": [ 263 | "where = \"GRADE = 'A'\"\n", 264 | "view_lyr.manager.update_definition({\"viewDefinitionQuery\": where})\n", 265 | "\n", 266 | "print_reference(view_lyr)" 267 | ] 268 | }, 269 | { 270 | "cell_type": "markdown", 271 | "metadata": {}, 272 | "source": [ 273 | "#### Features by geometry\n", 274 | "Update visible rows based on a geography. A couple notes, can pass in a JSON representation of an [envelope](https://developers.arcgis.com/python/api-reference/arcgis.geometry.html#envelope) or [polygon](https://developers.arcgis.com/python/api-reference/arcgis.geometry.html#polygon). There's probably overhead to arbitrary or complex polygons compared to envelopes. Also, the geometry coordinates should be specified in the same coordinate reference system as the map, this is often WebMercator (`wkid #102100`)." 
275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": 13, 280 | "metadata": {}, 281 | "outputs": [ 282 | { 283 | "name": "stdout", 284 | "output_type": "stream", 285 | "text": [ 286 | "# features: 1917, # fields: 27\n" 287 | ] 288 | } 289 | ], 290 | "source": [ 291 | "# Get Manhattan geometry, this is on the complex side. Data pulled from Hub\n", 292 | "# https://hub.arcgis.com/datasets/498c7ff03780407494301c23cb59b899_0\n", 293 | "buro_lyr = FeatureLayer('https://services1.arcgis.com/oOUgp466Coyjcu6V/arcgis/rest/services/NYC_Boroughs/FeatureServer/0')\n", 294 | "buro_fs = buro_lyr.query(out_sr=102100)\n", 295 | "manhattan_feature = list(filter(\n", 296 | " lambda f: f.attributes['boro_name'] == 'Manhattan',\n", 297 | " buro_fs.features\n", 298 | "))[0]\n", 299 | "manhattan_geom = manhattan_feature.geometry\n", 300 | "manhattan_geom['spatialReference'] = buro_fs.spatial_reference\n", 301 | "\n", 302 | "geo_definition = {\n", 303 | " \"filter\": {\n", 304 | " \"operator\": \"esriSpatialRelIntersects\",\n", 305 | " \"value\": {\n", 306 | " \"geometryType\": buro_fs.geometry_type,\n", 307 | " \"geometry\": manhattan_geom\n", 308 | " }\n", 309 | " }\n", 310 | "}\n", 311 | "view_lyr.manager.update_definition({\"viewLayerDefinition\": geo_definition})\n", 312 | "\n", 313 | "print_reference(view_lyr)" 314 | ] 315 | }, 316 | { 317 | "cell_type": "markdown", 318 | "metadata": {}, 319 | "source": [ 320 | "## Clean up" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": 10, 326 | "metadata": {}, 327 | "outputs": [ 328 | { 329 | "data": { 330 | "text/plain": [ 331 | "True" 332 | ] 333 | }, 334 | "execution_count": 10, 335 | "metadata": {}, 336 | "output_type": "execute_result" 337 | } 338 | ], 339 | "source": [ 340 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 341 | "gis.content.delete_items(delete_items)" 342 | ] 343 | } 344 | ], 345 | "metadata": { 346 | "kernelspec": { 347 | "display_name": 
"Python 3", 348 | "language": "python", 349 | "name": "python3" 350 | }, 351 | "language_info": { 352 | "codemirror_mode": { 353 | "name": "ipython", 354 | "version": 3 355 | }, 356 | "file_extension": ".py", 357 | "mimetype": "text/x-python", 358 | "name": "python", 359 | "nbconvert_exporter": "python", 360 | "pygments_lexer": "ipython3", 361 | "version": "3.7.4" 362 | } 363 | }, 364 | "nbformat": 4, 365 | "nbformat_minor": 4 366 | } 367 | -------------------------------------------------------------------------------- /feature_layers/update_data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Update Data\n", 8 | "A couple different workflows for updating data:\n", 9 | "* Apply edits -- good for small, transactional edits\n", 10 | "* Overwrite -- good for ETL when extracting the entire dataset to replace all the existing data\n", 11 | "* Append -- good for ETL, batch updating existing layers and other" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 1, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "import json\n", 21 | "import tempfile\n", 22 | "import os\n", 23 | "import shutil\n", 24 | "import pandas as pd\n", 25 | "from arcgis.gis import GIS\n", 26 | "from arcgis.features import FeatureLayerCollection" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "***Note**, if you are unable to import local `partnerutils`, **copy the following functions** from [`cool_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/cool_utils.py) and [`feature_utils`](https://github.com/mpayson/esri-partner-tools/blob/master/partnerutils/feature_utils.py)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 2, 39 | "metadata": {}, 40 | "outputs": [], 41 | "source": [ 42 | "from partnerutils.cool_utils import 
chunk_df\n", 43 | "from partnerutils.feature_utils import sdf_from_xyz" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "metadata": {}, 50 | "outputs": [ 51 | { 52 | "name": "stdin", 53 | "output_type": "stream", 54 | "text": [ 55 | "Enter password: ········\n" 56 | ] 57 | } 58 | ], 59 | "source": [ 60 | "# this will prompt for a password\n", 61 | "# can also do GIS(\"https://www.arcgis.com\", \"\", \"\")\n", 62 | "gis = GIS(username=\"mpayson_startups\")" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": {}, 68 | "source": [ 69 | "## Apply Edits\n", 70 | "This uses the **[`edit_features`](https://esri.github.io/arcgis-python-api/apidoc/html/arcgis.features.toc.html#arcgis.features.FeatureLayer.edit_features)** function to append data when working with a [Spatially Enabled DataFrame](https://developers.arcgis.com/python/guide/introduction-to-the-spatially-enabled-dataframe/). This method is best for small updates as it sends individual features as JSON and applies row-by-row edits. You can also update an existing feature using the `updates` parameter in `edit_features` by specifying a common Object ID.\n", 71 | "\n", 72 | "Note, after a few updates, I'd [rebuild the indexes](https://github.com/mpayson/esri-partner-tools/blob/master/feature_layers/manage_indexes.ipynb) as they can become unbalanced with many transactional edits." 
73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 4, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "# path to data\n", 82 | "csv_path = \"../sample_data/NYC_Restaurant_Inspections.csv\"\n", 83 | "x_col = \"Longitude\"\n", 84 | "y_col = \"Latitude\"" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 5, 90 | "metadata": {}, 91 | "outputs": [ 92 | { 93 | "data": { 94 | "text/plain": [ 95 | "5000" 96 | ] 97 | }, 98 | "execution_count": 5, 99 | "metadata": {}, 100 | "output_type": "execute_result" 101 | } 102 | ], 103 | "source": [ 104 | "# read csv and construct spatial dataframe\n", 105 | "df = pd.read_csv(csv_path)\n", 106 | "sdf = sdf_from_xyz(df, x_col, y_col)\n", 107 | "len(sdf)" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": 6, 113 | "metadata": {}, 114 | "outputs": [ 115 | { 116 | "data": { 117 | "text/html": [ 118 | "
\n", 119 | "
\n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | "
\n", 124 | "\n", 125 | "
\n", 126 | " MyFeatureService\n", 127 | " \n", 128 | "
Feature Layer Collection by mpayson_startups\n", 129 | "
Last Modified: December 10, 2019\n", 130 | "
0 comments, 0 views\n", 131 | "
\n", 132 | "
 \n", 133 | " " 134 | ], 135 | "text/plain": [ 136 | "" 137 | ] 138 | }, 139 | "execution_count": 6, 140 | "metadata": {}, 141 | "output_type": "execute_result" 142 | } 143 | ], 144 | "source": [ 145 | "# iterate through chunks to create and append data\n", 146 | "lyr = None\n", 147 | "for c_df in chunk_df(sdf):\n", 148 | " if not lyr:\n", 149 | " item = c_df.spatial.to_featurelayer(\"MyFeatureService\", tags=\"esri_partner_tools_sample\")\n", 150 | " lyr = item.layers[0]\n", 151 | " else:\n", 152 | " # THIS IS THE APPEND DATA PART\n", 153 | " fs = c_df.spatial.to_featureset()\n", 154 | " success = lyr.edit_features(adds=fs)\n", 155 | "item" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "## Overwrite\n", 163 | "This uses the **[`overwrite`](https://developers.arcgis.com/python/api-reference/arcgis.features.managers.html?highlight=overwrite#arcgis.features.managers.FeatureLayerCollectionManager.overwrite)** method. It deletes all data in an existing service and replaces it with updated data from the specified file. This is the most efficient method if you are completely updating a service.\n", 164 | "\n", 165 | "Note, the overwriting file type **must be the same** as the type used to create the initial service (more info in doc). If you publish from a data frame (like above) the Python API zips to a FGDB or SHP before publishing, so updating with geojson or CSV won't work." 
166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": 7, 171 | "metadata": {}, 172 | "outputs": [], 173 | "source": [ 174 | "# get two geojson subsets to showcase the workflow\n", 175 | "file_path = \"../sample_data/NYC_Restaurant_Inspections.geojson\"\n", 176 | "with open(file_path) as file:\n", 177 | " geojson = json.load(file)\n", 178 | "\n", 179 | "features = geojson.pop('features')\n", 180 | "init_geo = {**geojson, **{'features': features[0:3000]}}\n", 181 | "next_geo = {**geojson, **{'features': features[3000:5000]}}" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 8, 187 | "metadata": {}, 188 | "outputs": [ 189 | { 190 | "data": { 191 | "text/html": [ 192 | "
\n", 193 | "
\n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | "
\n", 198 | "\n", 199 | "
\n", 200 | " Inspections\n", 201 | " \n", 202 | "
Feature Layer Collection by mpayson_startups\n", 203 | "
Last Modified: December 10, 2019\n", 204 | "
0 comments, 0 views\n", 205 | "
\n", 206 | "
\n", 207 | " " 208 | ], 209 | "text/plain": [ 210 | "" 211 | ] 212 | }, 213 | "execution_count": 8, 214 | "metadata": {}, 215 | "output_type": "execute_result" 216 | } 217 | ], 218 | "source": [ 219 | "# publish the initial layer\n", 220 | "temp_dir_path = tempfile.mkdtemp()\n", 221 | "temp_file_path = os.path.join(temp_dir_path, 'Inspections.geojson')\n", 222 | "with open(temp_file_path, 'w') as temp_file:\n", 223 | " json.dump(init_geo, temp_file)\n", 224 | "\n", 225 | "item = gis.content.add({\n", 226 | " \"type\": \"GeoJson\",\n", 227 | " \"title\": \"Inspections\",\n", 228 | " \"tags\": \"esri_partner_tools_sample\",\n", 229 | " #... additional properties like descriptions\n", 230 | "}, data=temp_file_path)\n", 231 | "shutil.rmtree(temp_dir_path)\n", 232 | "\n", 233 | "lyr_item = item.publish()\n", 234 | "lyr_item" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": 9, 240 | "metadata": {}, 241 | "outputs": [ 242 | { 243 | "data": { 244 | "text/html": [ 245 | "
\n", 246 | "
\n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | "
\n", 251 | "\n", 252 | "
\n", 253 | " Inspections\n", 254 | " \n", 255 | "
Feature Layer Collection by mpayson_startups\n", 256 | "
Last Modified: December 10, 2019\n", 257 | "
0 comments, 0 views\n", 258 | "
\n", 259 | "
\n", 260 | " " 261 | ], 262 | "text/plain": [ 263 | "" 264 | ] 265 | }, 266 | "execution_count": 9, 267 | "metadata": {}, 268 | "output_type": "execute_result" 269 | } 270 | ], 271 | "source": [ 272 | "# overwrite the layer with second geojson \n", 273 | "temp_dir_path = tempfile.mkdtemp()\n", 274 | "temp_file_path = os.path.join(temp_dir_path, 'Inspections.geojson')\n", 275 | "with open(temp_file_path, 'w') as temp_file:\n", 276 | " json.dump(next_geo, temp_file)\n", 277 | "\n", 278 | "# THIS IS THE IMPORTANT PART\n", 279 | "flc = FeatureLayerCollection.fromitem(lyr_item)\n", 280 | "flc.manager.overwrite(temp_file_path)\n", 281 | "\n", 282 | "shutil.rmtree(temp_dir_path)\n", 283 | "\n", 284 | "lyr_item" 285 | ] 286 | }, 287 | { 288 | "cell_type": "markdown", 289 | "metadata": {}, 290 | "source": [ 291 | "## Append\n", 292 | "This uses the **[`append`](https://developers.arcgis.com/python/api-reference/arcgis.features.toc.html#arcgis.features.FeatureLayer.append)** method (exclusive to ArcGIS Online). It upserts data from a previously existing file item (of any supported type) and is more efficient for larger updates. It can also update existing features. By default, this uses the Object ID attribute to determine which features to update, but you can also specify your own field, this field just [has to be indexed](https://github.com/mpayson/esri-partner-tools/blob/master/feature_layers/manage_indexes.ipynb) in ArcGIS Online." 
293 | ] 294 | }, 295 | { 296 | "cell_type": "code", 297 | "execution_count": 4, 298 | "metadata": {}, 299 | "outputs": [], 300 | "source": [ 301 | "# get two geojson subsets to showcase the workflow\n", 302 | "file_path = \"../sample_data/NYC_Restaurant_Inspections.geojson\"\n", 303 | "with open(file_path) as file:\n", 304 | " geojson = json.load(file)\n", 305 | "\n", 306 | "features = geojson.pop('features')\n", 307 | "init_geo = {**geojson, **{'features': features[0:3000]}}\n", 308 | "next_geo = {**geojson, **{'features': features[3000:5000]}}" 309 | ] 310 | }, 311 | { 312 | "cell_type": "code", 313 | "execution_count": 6, 314 | "metadata": {}, 315 | "outputs": [ 316 | { 317 | "data": { 318 | "text/html": [ 319 | "
\n", 320 | "
\n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | "
\n", 325 | "\n", 326 | "
\n", 327 | " Inspections\n", 328 | " \n", 329 | "
Feature Layer Collection by mpayson_startups\n", 330 | "
Last Modified: January 29, 2020\n", 331 | "
0 comments, 0 views\n", 332 | "
\n", 333 | "
\n", 334 | " " 335 | ], 336 | "text/plain": [ 337 | "" 338 | ] 339 | }, 340 | "execution_count": 6, 341 | "metadata": {}, 342 | "output_type": "execute_result" 343 | } 344 | ], 345 | "source": [ 346 | "# publish the initial layer\n", 347 | "temp_dir_path = tempfile.mkdtemp()\n", 348 | "temp_file_path = os.path.join(temp_dir_path, 'Inspections.geojson')\n", 349 | "with open(temp_file_path, 'w') as temp_file:\n", 350 | " json.dump(init_geo, temp_file)\n", 351 | "\n", 352 | "item = gis.content.add({\n", 353 | " \"type\": \"GeoJson\",\n", 354 | " \"title\": \"Inspections\",\n", 355 | " \"tags\": \"esri_partner_tools_sample\",\n", 356 | " # ... additional properties like descriptions\n", 357 | "}, data=temp_file_path)\n", 358 | "shutil.rmtree(temp_dir_path)\n", 359 | "\n", 360 | "lyr_item = item.publish()\n", 361 | "lyr_item" 362 | ] 363 | }, 364 | { 365 | "cell_type": "code", 366 | "execution_count": 10, 367 | "metadata": {}, 368 | "outputs": [ 369 | { 370 | "data": { 371 | "text/html": [ 372 | "
\n", 373 | "
\n", 374 | " \n", 375 | " \n", 376 | " \n", 377 | "
\n", 378 | "\n", 379 | "
\n", 380 | " Append Inspections\n", 381 | " \n", 382 | "
GeoJson by mpayson_startups\n", 383 | "
Last Modified: January 29, 2020\n", 384 | "
0 comments, 0 views\n", 385 | "
\n", 386 | "
\n", 387 | " " 388 | ], 389 | "text/plain": [ 390 | "" 391 | ] 392 | }, 393 | "execution_count": 10, 394 | "metadata": {}, 395 | "output_type": "execute_result" 396 | } 397 | ], 398 | "source": [ 399 | "# overwrite the layer with second geojson \n", 400 | "temp_dir_path = tempfile.mkdtemp()\n", 401 | "temp_file_path = os.path.join(temp_dir_path, 'AppendInspections.geojson')\n", 402 | "with open(temp_file_path, 'w') as temp_file:\n", 403 | " json.dump(next_geo, temp_file)\n", 404 | " \n", 405 | "# add geojson as an item\n", 406 | "append_item = gis.content.add({\n", 407 | " \"type\": \"GeoJson\",\n", 408 | " \"title\": \"Append Inspections\",\n", 409 | " \"tags\": \"esri_partner_tools_sample\"\n", 410 | "}, data=temp_file_path)\n", 411 | "shutil.rmtree(temp_dir_path)\n", 412 | "\n", 413 | "lyr = lyr_item.layers[0]\n", 414 | "lyr.append(\n", 415 | " item_id=append_item.id,\n", 416 | " upload_format='geojson',\n", 417 | " # optional field to identify existing data to update from new data\n", 418 | " # field needs to be uniquely indexed (see manage indexes notebook)\n", 419 | " # by default, this is the ObjectID\n", 420 | "# upsert_matching_field=\"field_name\"\n", 421 | ")\n", 422 | "\n", 423 | "lyr_item" 424 | ] 425 | }, 426 | { 427 | "cell_type": "markdown", 428 | "metadata": {}, 429 | "source": [ 430 | "### Clean up" 431 | ] 432 | }, 433 | { 434 | "cell_type": "code", 435 | "execution_count": 11, 436 | "metadata": {}, 437 | "outputs": [ 438 | { 439 | "data": { 440 | "text/plain": [ 441 | "True" 442 | ] 443 | }, 444 | "execution_count": 11, 445 | "metadata": {}, 446 | "output_type": "execute_result" 447 | } 448 | ], 449 | "source": [ 450 | "delete_items = gis.content.search(\"tags:esri_partner_tools_sample\")\n", 451 | "gis.content.delete_items(delete_items)" 452 | ] 453 | } 454 | ], 455 | "metadata": { 456 | "kernelspec": { 457 | "display_name": "Python 3", 458 | "language": "python", 459 | "name": "python3" 460 | }, 461 | "language_info": { 462 | 
"codemirror_mode": { 463 | "name": "ipython", 464 | "version": 3 465 | }, 466 | "file_extension": ".py", 467 | "mimetype": "text/x-python", 468 | "name": "python", 469 | "nbconvert_exporter": "python", 470 | "pygments_lexer": "ipython3", 471 | "version": "3.8.3" 472 | } 473 | }, 474 | "nbformat": 4, 475 | "nbformat_minor": 4 476 | } 477 | -------------------------------------------------------------------------------- /common_workflows/update_webmaps.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Update Webmaps\n", 8 | "Short snippets for updating webmaps!" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "import json\n", 18 | "from arcgis.gis import GIS, Item" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 2, 24 | "metadata": {}, 25 | "outputs": [ 26 | { 27 | "name": "stdin", 28 | "output_type": "stream", 29 | "text": [ 30 | "Enter password: ········\n" 31 | ] 32 | } 33 | ], 34 | "source": [ 35 | "gis = GIS(username=\"mpayson_startups\")" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "## Initialization" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": {}, 49 | "outputs": [ 50 | { 51 | "data": { 52 | "text/html": [ 53 | "
\n", 54 | "
\n", 55 | " \n", 56 | " \n", 57 | " \n", 58 | "
\n", 59 | "\n", 60 | "
\n", 61 | " Demo Map Updates\n", 62 | " \n", 63 | "
Web Map by mpayson_startups\n", 64 | "
Last Modified: December 29, 2019\n", 65 | "
0 comments, 11 views\n", 66 | "
\n", 67 | "
\n", 68 | " " 69 | ], 70 | "text/plain": [ 71 | "" 72 | ] 73 | }, 74 | "execution_count": 3, 75 | "metadata": {}, 76 | "output_type": "execute_result" 77 | } 78 | ], 79 | "source": [ 80 | "# Get map item and fetch the JSON representation\n", 81 | "map_item = Item(gis, '185a060995a44e98b3db2c27f3f533e2')\n", 82 | "map_item" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": 4, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "map_dict = dict(map_item.get_data())\n", 92 | "revert_dict = map_dict" 93 | ] 94 | }, 95 | { 96 | "cell_type": "markdown", 97 | "metadata": {}, 98 | "source": [ 99 | "## Change basemap\n", 100 | "\n", 101 | "Replaces a webmap basemap with **[an existing basemap](https://www.arcgis.com/home/group.html?id=30de8da907d240a0bccd5ad3ff25ef4a&view=list&focus=maps-webmaps#content)** in ArcGIS Online." 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 5, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "data": { 111 | "text/plain": [ 112 | "True" 113 | ] 114 | }, 115 | "execution_count": 5, 116 | "metadata": {}, 117 | "output_type": "execute_result" 118 | } 119 | ], 120 | "source": [ 121 | "# colored pencil basemap\n", 122 | "bm_item = Item(gis, '826498a48bd0424f9c9315214f2165d4')\n", 123 | "\n", 124 | "map_dict = dict(map_item.get_data())\n", 125 | "bm_dict = dict(bm_item.get_data())\n", 126 | "\n", 127 | "map_dict['baseMap'] = bm_dict['baseMap']\n", 128 | "\n", 129 | "map_item.update(data=json.dumps(map_dict))" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "## Change layer visibility\n", 137 | "Controls whether a specific layer is visible when the map first loads" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": 6, 143 | "metadata": {}, 144 | "outputs": [ 145 | { 146 | "data": { 147 | "text/plain": [ 148 | "True" 149 | ] 150 | }, 151 | "execution_count": 6, 152 | "metadata": {}, 153 | "output_type": 
"execute_result" 154 | } 155 | ], 156 | "source": [ 157 | "lyr_i = [\n", 158 | " i for i,l in enumerate(map_dict['operationalLayers'])\n", 159 | " if l['id'] == 'USA_Consumer_Expenditures_1_2019_5133'\n", 160 | "][0]\n", 161 | "\n", 162 | "is_vis = not map_dict['operationalLayers'][lyr_i]['visibility']\n", 163 | "map_dict['operationalLayers'][lyr_i]['visibility'] = is_vis\n", 164 | "\n", 165 | "map_item.update(data=json.dumps(map_dict))" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "## Change layer title & id\n", 173 | "Updates a specific layer title & id. **NOTE** Esri configurable apps use the ID to configure & define a layers behavior, so if you change the ID it will likely break any pre-created apps." 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 7, 179 | "metadata": {}, 180 | "outputs": [ 181 | { 182 | "data": { 183 | "text/plain": [ 184 | "True" 185 | ] 186 | }, 187 | "execution_count": 7, 188 | "metadata": {}, 189 | "output_type": "execute_result" 190 | } 191 | ], 192 | "source": [ 193 | "lyr_dict = map_dict['operationalLayers'][0]\n", 194 | "update_lyr_dict = {\n", 195 | " **lyr_dict,\n", 196 | " 'title': 'Some cool demographic data',\n", 197 | " 'id': 'my_well_known_id'\n", 198 | "}\n", 199 | "map_dict['operationalLayers'][0] = update_lyr_dict\n", 200 | "\n", 201 | "map_item.update(data=json.dumps(map_dict))" 202 | ] 203 | }, 204 | { 205 | "cell_type": "markdown", 206 | "metadata": {}, 207 | "source": [ 208 | "## Apply filter to layer\n", 209 | "Limits what data gets drawn on the map. Note, this is used as an initial configuration, not to securely limit what data the end-user can access. For the latter, [create a new view item](https://github.com/mpayson/esri-partner-tools/blob/master/feature_layers/create_views.ipynb).\n", 210 | "\n", 211 | "TODO: the mapviewer also automatically builds an index when a filter is applied, might be worth doing the same. 
Similar code and index params:\n", 212 | "```python\n", 213 | "\"indexes\":[{\n", 214 | " \"name\":\"GRADE_Index\",\n", 215 | " \"fields\":\"GRADE\",\n", 216 | " \"isUnique\":False,\n", 217 | " \"isAscending\":True,\n", 218 | " \"description\":\"GRADE_Index\"\n", 219 | "}]\n", 220 | "```" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 8, 226 | "metadata": {}, 227 | "outputs": [ 228 | { 229 | "data": { 230 | "text/plain": [ 231 | "True" 232 | ] 233 | }, 234 | "execution_count": 8, 235 | "metadata": {}, 236 | "output_type": "execute_result" 237 | } 238 | ], 239 | "source": [ 240 | "lyr_i = [\n", 241 | " i for i,l in enumerate(map_dict['operationalLayers'])\n", 242 | " if l['id'] == 'aff768_9005'\n", 243 | "][0]\n", 244 | "lyr_dict = map_dict['operationalLayers'][lyr_i]\n", 245 | "\n", 246 | "sql_where = \"GRADE = 'A'\"\n", 247 | "update_lyr_dict = {\n", 248 | " **lyr_dict,\n", 249 | " \"layerDefinition\": {\n", 250 | " \"definitionExpression\": sql_where\n", 251 | " }\n", 252 | "}\n", 253 | "\n", 254 | "map_dict['operationalLayers'][lyr_i] = update_lyr_dict\n", 255 | "map_item.update(data=json.dumps(map_dict))" 256 | ] 257 | }, 258 | { 259 | "cell_type": "markdown", 260 | "metadata": {}, 261 | "source": [ 262 | "## Clean up" 263 | ] 264 | }, 265 | { 266 | "cell_type": "code", 267 | "execution_count": 9, 268 | "metadata": {}, 269 | "outputs": [ 270 | { 271 | "data": { 272 | "text/plain": [ 273 | "True" 274 | ] 275 | }, 276 | "execution_count": 9, 277 | "metadata": {}, 278 | "output_type": "execute_result" 279 | } 280 | ], 281 | "source": [ 282 | "map_item.update(data=json.dumps(revert_dict))" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": null, 288 | "metadata": {}, 289 | "outputs": [], 290 | "source": [] 291 | } 292 | ], 293 | "metadata": { 294 | "kernelspec": { 295 | "display_name": "Python 3", 296 | "language": "python", 297 | "name": "python3" 298 | }, 299 | "language_info": { 300 | "codemirror_mode": { 
301 | "name": "ipython", 302 | "version": 3 303 | }, 304 | "file_extension": ".py", 305 | "mimetype": "text/x-python", 306 | "name": "python", 307 | "nbconvert_exporter": "python", 308 | "pygments_lexer": "ipython3", 309 | "version": "3.7.4" 310 | } 311 | }, 312 | "nbformat": 4, 313 | "nbformat_minor": 4 314 | } 315 | --------------------------------------------------------------------------------