├── .git-blame-ignore-revs ├── .gitignore ├── LICENSE ├── README.md ├── pyproject.toml ├── requirements.txt ├── requirements_lint.txt ├── scripts ├── CloseApiWrapper.py ├── __init__.py ├── bulk_download_call_recordings.py ├── bulk_update_address_countries.py ├── bulk_update_leads_info.py ├── change_sequence_sender.py ├── clone_organization.py ├── csv_to_cio.py ├── custom_field_change_report.py ├── delete_emails_from_contacts.py ├── delete_secondary_addresses.py ├── delete_tasks_for_inactive_users.py ├── events_by_request_id.py ├── export_activities_to_json.py ├── export_calls.py ├── export_sequence_subscriptions_public.py ├── export_sequences_data.py ├── export_sms.py ├── find_contact_duplicates_on_single_lead.py ├── find_duplicate_leads.py ├── import_leads_from_close_json.py ├── move_custom_field_to_contact_info.py ├── restore_deleted_leads.py ├── run_leads_deleted_report.py ├── run_leads_merged_report.py ├── sample_script.py ├── time_to_respond_report.py ├── update_opportunities.py └── user_reassign.py └── setup.cfg /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Migrate code style to Black 2 | 871f0b75d75969c3e59408adb93426c1159dc3f3 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # Finder 82 | .DS_Store 83 | 84 | # virtualenv 85 | venv/ 86 | ENV/ 87 | 88 | # Spyder project settings 89 | .spyderproject 90 | 91 | # Rope project settings 92 | .ropeproject 93 | 94 | # PyCharm (Jet Brains) project settings 95 | .idea -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Close 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Close API scripts 2 | 3 | Example Python scripts for interacting with [Close](http://close.com/) through its [API](http://developer.close.com/) 4 | using the [closeio_api Python client](https://github.com/closeio/closeio-api). 5 | 6 | ## Install basic dependencies 7 | 8 | Before you start, you should already have `git`, `python 3` and `virtualenv` installed. 9 | For OS X users, we recommend [Homebrew](https://brew.sh/). 10 | 11 | ## Setup 12 | 13 | 1. `git clone https://github.com/closeio/close-api-scripts.git` 14 | 2. `cd close-api-scripts` 15 | 3. `virtualenv venv` 16 | 4. `. venv/bin/activate` 17 | 5. `pip install -r requirements.txt` 18 | 19 | ## Running a script 20 | 21 | Example: 22 | 23 | ```bash 24 | python scripts/run_leads_deleted_report.py -k MYAPIKEY 25 | ... 26 | ``` 27 | 28 | If you have any questions, please contact [support@close.com](mailto:support@close.com?Subject=Close%20API%20Scripts). 
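All of the scripts in `scripts/` follow the same basic pattern: create a `closeio_api` client with your API key, then page through a collection using `_skip` and the `has_more` flag on each response. A minimal sketch of that loop (the query and field list below are placeholders, not taken from any particular script):

```python
from closeio_api import Client

api = Client('MYAPIKEY')  # the same key you would pass to a script via -k/--api-key

leads = []
has_more = True
offset = 0
while has_more:
    # Each response carries a `data` list and a `has_more` flag;
    # `_skip` advances the offset to the next page.
    resp = api.get(
        'lead',
        params={
            'query': '* sort:created',     # placeholder query
            '_skip': offset,
            '_fields': 'id,display_name',  # placeholder field list
        },
    )
    leads.extend(resp['data'])
    offset += len(resp['data'])
    has_more = resp['has_more']

print(f'Fetched {len(leads)} leads')
```

`scripts/CloseApiWrapper.py` wraps this same loop in a `get_all_items()` helper for the scripts that need it.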
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | skip-string-normalization = true 3 | target-version = ['py37'] 4 | line-length = 79 5 | exclude = ''' 6 | /( 7 | \.git 8 | | \.venv 9 | | venv 10 | )/ 11 | ''' -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | click==6.7 2 | progressbar-latest==2.4 3 | python-dateutil==2.7.3 4 | requests==2.20.0 5 | Unidecode==1.0.22 6 | closeio==2.0 7 | gevent==22.10.2 8 | -------------------------------------------------------------------------------- /requirements_lint.txt: -------------------------------------------------------------------------------- 1 | lintlizard==0.8.0 -------------------------------------------------------------------------------- /scripts/CloseApiWrapper.py: -------------------------------------------------------------------------------- 1 | from closeio_api import Client 2 | 3 | 4 | class CloseApiWrapper(Client): 5 | """ 6 | Close API wrapper that makes it easier to paginate through resources and get all items 7 | with a single function call alongside some convenience functions (e.g. getting all lead statuses). 8 | """ 9 | 10 | def __init__( 11 | self, api_key=None, tz_offset=None, max_retries=5, development=False 12 | ): 13 | super().__init__( 14 | api_key=api_key, 15 | tz_offset=tz_offset, 16 | max_retries=max_retries, 17 | development=development, 18 | ) 19 | 20 | def get_lead_statuses(self): 21 | organization_id = self.get('me')['organizations'][0]['id'] 22 | return self.get( 23 | f"organization/{organization_id}", 24 | params={"_fields": "lead_statuses"}, 25 | )["lead_statuses"] 26 | 27 | def get_opportunity_pipelines(self): 28 | organization_id = self.get('me')['organizations'][0]['id'] 29 | return self.get( 30 | f"organization/{organization_id}", 31 | params={"_fields": "pipelines"}, 32 | )["pipelines"] 33 | 34 | def get_custom_fields(self, type): 35 | return self.get(f"custom_field_schema/{type}")["fields"] 36 | 37 | def get_opportunity_statuses(self): 38 | organization_id = self.get('me')['organizations'][0]['id'] 39 | pipelines = self.get( 40 | f"organization/{organization_id}", 41 | params={"_fields": "pipelines"}, 42 | )["pipelines"] 43 | 44 | opportunity_statuses = [] 45 | for pipeline in pipelines: 46 | opportunity_statuses.extend(pipeline['statuses']) 47 | 48 | return opportunity_statuses 49 | 50 | def get_all_items(self, url, params=None): 51 | if params is None: 52 | params = {} 53 | 54 | items = [] 55 | has_more = True 56 | offset = 0 57 | while has_more: 58 | params["_skip"] = offset 59 | resp = self.get(url, params=params) 60 | items.extend(resp['data']) 61 | offset += len(resp["data"]) 62 | has_more = resp["has_more"] 63 | 64 | return items 65 | -------------------------------------------------------------------------------- /scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/closeio/close-api-scripts/97017ac59a7e0ea58583f74451bf9ea595962f85/scripts/__init__.py -------------------------------------------------------------------------------- /scripts/bulk_download_call_recordings.py: -------------------------------------------------------------------------------- 1 | import gevent.monkey 2 | from gevent.pool import Pool 3 | 4 | 
gevent.monkey.patch_all() 5 | 6 | import argparse 7 | from closeio_api import Client as CloseIO_API 8 | from dateutil.relativedelta import relativedelta 9 | from datetime import datetime 10 | import requests 11 | from operator import itemgetter 12 | import csv 13 | 14 | parser = argparse.ArgumentParser( 15 | description='Bulk Download Close Call Recordings into a specified Folder' 16 | ) 17 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 18 | parser.add_argument( 19 | '--date_start', 20 | '-s', 21 | required=True, 22 | help='The start of the date range you want to download recordings for in yyyy-mm-dd format.', 23 | ) 24 | parser.add_argument( 25 | '--date_end', 26 | '-e', 27 | required=True, 28 | help='The end of the date range you want to download recordings for in yyyy-mm-dd format.', 29 | ) 30 | parser.add_argument( 31 | '--file-path', 32 | '-f', 33 | required=True, 34 | help='The file path to the folder where the recordings will be stored.', 35 | ) 36 | args = parser.parse_args() 37 | 38 | api = CloseIO_API(args.api_key) 39 | 40 | days = [] 41 | calls = [] 42 | downloaded_calls = [] 43 | starting_date = datetime.strptime(args.date_start, '%Y-%m-%d') 44 | ending_date = ( 45 | starting_date + relativedelta(days=+1) - relativedelta(seconds=+1) 46 | ) 47 | ending_date_final = datetime.strptime(args.date_end, '%Y-%m-%d') 48 | 49 | # Generate a list of days to cycle through in the date range 50 | while starting_date < ending_date_final: 51 | starting_date_string = datetime.strftime( 52 | starting_date, "%Y-%m-%dT%H:%M:%S" 53 | ) 54 | ending_date_string = datetime.strftime(ending_date, "%Y-%m-%dT%H:%M:%S") 55 | days.append( 56 | { 57 | 'day': starting_date.strftime('%Y-%m-%d'), 58 | 'start_date': starting_date_string, 59 | 'end_date': ending_date_string, 60 | } 61 | ) 62 | starting_date = starting_date + relativedelta(days=+1) 63 | ending_date = ( 64 | starting_date + relativedelta(days=+1) - relativedelta(seconds=+1) 65 | ) 66 | 67 | 68 | # Method to get all of the recordings for a specific day. 
69 | def getRecordedCalls(day): 70 | print(f"Getting all recorded call activities for {day['day']}...") 71 | has_more = True 72 | offset = 0 73 | while has_more: 74 | resp = api.get( 75 | 'activity/call', 76 | params={ 77 | '_skip': offset, 78 | 'date_created__gte': day['start_date'], 79 | 'date_created__lte': day['end_date'], 80 | '_fields': 'id,recording_url,voicemail_url,date_created,lead_id,duration,voicemail_duration,date_created', 81 | }, 82 | ) 83 | for call in resp['data']: 84 | if (call['duration'] > 0 or call['voicemail_duration'] > 0) and ( 85 | call.get('recording_url') or call.get('voicemail_url') 86 | ): 87 | call['url'] = call.get( 88 | 'recording_url', call.get('voicemail_url') 89 | ) 90 | if call['duration'] > 0: 91 | call['Type'] = 'Answered Call' 92 | call['Answered or Voicemail Duration'] = call['duration'] 93 | else: 94 | call['Type'] = 'Voicemail' 95 | call['Answered or Voicemail Duration'] = call[ 96 | 'voicemail_duration' 97 | ] 98 | calls.append(call) 99 | offset += len(resp['data']) 100 | has_more = resp['has_more'] 101 | 102 | 103 | pool = Pool(5) 104 | pool.map(getRecordedCalls, days) 105 | 106 | # Sort all calls by date_created to be in order because they were pulled in parallel 107 | calls = sorted(calls, key=itemgetter('date_created'), reverse=True) 108 | 109 | 110 | # Method to download a call recording or voicemail recording 111 | def downloadCall(call): 112 | try: 113 | call_title = "close-recording-%s.mp3" % call['id'] 114 | url = call['url'] 115 | doc = requests.get( 116 | url, 117 | headers={'Content-Type': 'application/json'}, 118 | auth=(args.api_key, ''), 119 | ) 120 | with open("%s/%s" % (args.file_path, call_title), 'wb') as f: 121 | f.write(doc.content) 122 | downloaded_calls.append( 123 | { 124 | 'Call Activity ID': call['id'], 125 | 'Date Created': call['date_created'], 126 | 'Type': call['Type'], 127 | 'Duration': call['Answered or Voicemail Duration'], 128 | 'Lead ID': call['lead_id'], 129 | 'Filename': call_title, 130 | 'url': url, 131 | } 132 | ) 133 | print( 134 | f"{(calls.index(call) + 1)} of {len(calls)}: Downloading {call_title}" 135 | ) 136 | except Exception as e: 137 | print(e) 138 | 139 | 140 | pool.map(downloadCall, calls) 141 | 142 | # Sort all downloaded calls by date_created to be in order because they were pulled in parallel 143 | downloaded_calls = sorted( 144 | downloaded_calls, key=itemgetter('Date Created'), reverse=True 145 | ) 146 | # Write Filename Output to CSV 147 | org_name = api.get('me')['organizations'][0]['name'].replace('/', '') 148 | f = open( 149 | f'{args.file_path}/{org_name} Downloaded Call Recordings from {args.date_start} to {args.date_end} Reference.csv', 150 | 'w', 151 | newline='', 152 | encoding='utf-8', 153 | ) 154 | try: 155 | ordered_keys = [ 156 | 'Call Activity ID', 157 | 'Filename', 158 | 'Date Created', 159 | 'Type', 160 | 'Duration', 161 | 'Lead ID', 162 | 'url', 163 | ] 164 | writer = csv.DictWriter(f, ordered_keys) 165 | writer.writeheader() 166 | writer.writerows(downloaded_calls) 167 | finally: 168 | f.close() 169 | -------------------------------------------------------------------------------- /scripts/bulk_update_address_countries.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import argparse 4 | import logging 5 | 6 | from closeio_api import Client as CloseIO_API 7 | 8 | LEADS_QUERY = '* sort:created' 9 | 10 | ISO_COUNTRIES = { 11 | 'AF': 'Afghanistan', 12 | 'AX': 'Aland Islands', 13 | 'AL': 'Albania', 14 | 'DZ': 
'Algeria', 15 | 'AS': 'American Samoa', 16 | 'AD': 'Andorra', 17 | 'AO': 'Angola', 18 | 'AI': 'Anguilla', 19 | 'AQ': 'Antarctica', 20 | 'AG': 'Antigua and Barbuda', 21 | 'AR': 'Argentina', 22 | 'AM': 'Armenia', 23 | 'AW': 'Aruba', 24 | 'AU': 'Australia', 25 | 'AT': 'Austria', 26 | 'AZ': 'Azerbaijan', 27 | 'BS': 'Bahamas', 28 | 'BH': 'Bahrain', 29 | 'BD': 'Bangladesh', 30 | 'BB': 'Barbados', 31 | 'BY': 'Belarus', 32 | 'BE': 'Belgium', 33 | 'BZ': 'Belize', 34 | 'BJ': 'Benin', 35 | 'BM': 'Bermuda', 36 | 'BT': 'Bhutan', 37 | 'BO': 'Bolivia, Plurinational State of', 38 | 'BQ': 'Bonaire, Sint Eustatius and Saba', 39 | 'BA': 'Bosnia and Herzegovina', 40 | 'BW': 'Botswana', 41 | 'BV': 'Bouvet Island', 42 | 'BR': 'Brazil', 43 | 'IO': 'British Indian Ocean Territory', 44 | 'BN': 'Brunei Darussalam', 45 | 'BG': 'Bulgaria', 46 | 'BF': 'Burkina Faso', 47 | 'BI': 'Burundi', 48 | 'KH': 'Cambodia', 49 | 'CM': 'Cameroon', 50 | 'CA': 'Canada', 51 | 'CV': 'Cape Verde', 52 | 'KY': 'Cayman Islands', 53 | 'CF': 'Central African Republic', 54 | 'TD': 'Chad', 55 | 'CL': 'Chile', 56 | 'CN': 'China', 57 | 'CX': 'Christmas Island', 58 | 'CC': 'Cocos (Keeling) Islands', 59 | 'CO': 'Colombia', 60 | 'KM': 'Comoros', 61 | 'CG': 'Congo', 62 | 'CD': 'Congo, The Democratic Republic of the', 63 | 'CK': 'Cook Islands', 64 | 'CR': 'Costa Rica', 65 | 'CI': 'Cote d\'Ivoire', 66 | 'HR': 'Croatia', 67 | 'CU': 'Cuba', 68 | 'CW': 'Curaçao', 69 | 'CY': 'Cyprus', 70 | 'CZ': 'Czech Republic', 71 | 'DK': 'Denmark', 72 | 'DJ': 'Djibouti', 73 | 'DM': 'Dominica', 74 | 'DO': 'Dominican Republic', 75 | 'EC': 'Ecuador', 76 | 'EG': 'Egypt', 77 | 'SV': 'El Salvador', 78 | 'GQ': 'Equatorial Guinea', 79 | 'ER': 'Eritrea', 80 | 'EE': 'Estonia', 81 | 'ET': 'Ethiopia', 82 | 'FK': 'Falkland Islands (Malvinas)', 83 | 'FO': 'Faroe Islands', 84 | 'FJ': 'Fiji', 85 | 'FI': 'Finland', 86 | 'FR': 'France', 87 | 'GF': 'French Guiana', 88 | 'PF': 'French Polynesia', 89 | 'TF': 'French Southern Territories', 90 | 'GA': 'Gabon', 91 | 'GM': 'Gambia', 92 | 'GE': 'Georgia', 93 | 'DE': 'Germany', 94 | 'GH': 'Ghana', 95 | 'GI': 'Gibraltar', 96 | 'GR': 'Greece', 97 | 'GL': 'Greenland', 98 | 'GD': 'Grenada', 99 | 'GP': 'Guadeloupe', 100 | 'GU': 'Guam', 101 | 'GT': 'Guatemala', 102 | 'GG': 'Guernsey', 103 | 'GN': 'Guinea', 104 | 'GW': 'Guinea-Bissau', 105 | 'GY': 'Guyana', 106 | 'HT': 'Haiti', 107 | 'HM': 'Heard Island and McDonald Islands', 108 | 'VA': 'Holy See (Vatican City State)', 109 | 'HN': 'Honduras', 110 | 'HK': 'Hong Kong', 111 | 'HU': 'Hungary', 112 | 'IS': 'Iceland', 113 | 'IN': 'India', 114 | 'ID': 'Indonesia', 115 | 'IR': 'Iran, Islamic Republic of', 116 | 'IQ': 'Iraq', 117 | 'IE': 'Ireland', 118 | 'IM': 'Isle of Man', 119 | 'IL': 'Israel', 120 | 'IT': 'Italy', 121 | 'JM': 'Jamaica', 122 | 'JP': 'Japan', 123 | 'JE': 'Jersey', 124 | 'JO': 'Jordan', 125 | 'KZ': 'Kazakhstan', 126 | 'KE': 'Kenya', 127 | 'KI': 'Kiribati', 128 | 'KP': 'Korea, Democratic People\'s Republic of', 129 | 'KR': 'Korea, Republic of', 130 | 'KW': 'Kuwait', 131 | 'KG': 'Kyrgyzstan', 132 | 'LA': 'Lao People\'s Democratic Republic', 133 | 'LV': 'Latvia', 134 | 'LB': 'Lebanon', 135 | 'LS': 'Lesotho', 136 | 'LR': 'Liberia', 137 | 'LY': 'Libya', 138 | 'LI': 'Liechtenstein', 139 | 'LT': 'Lithuania', 140 | 'LU': 'Luxembourg', 141 | 'MO': 'Macao', 142 | 'MK': 'Macedonia, Republic of', 143 | 'MG': 'Madagascar', 144 | 'MW': 'Malawi', 145 | 'MY': 'Malaysia', 146 | 'MV': 'Maldives', 147 | 'ML': 'Mali', 148 | 'MT': 'Malta', 149 | 'MH': 'Marshall Islands', 150 | 'MQ': 'Martinique', 151 | 'MR': 
'Mauritania', 152 | 'MU': 'Mauritius', 153 | 'YT': 'Mayotte', 154 | 'MX': 'Mexico', 155 | 'FM': 'Micronesia, Federated States of', 156 | 'MD': 'Moldova, Republic of', 157 | 'MC': 'Monaco', 158 | 'MN': 'Mongolia', 159 | 'ME': 'Montenegro', 160 | 'MS': 'Montserrat', 161 | 'MA': 'Morocco', 162 | 'MZ': 'Mozambique', 163 | 'MM': 'Myanmar', 164 | 'NA': 'Namibia', 165 | 'NR': 'Nauru', 166 | 'NP': 'Nepal', 167 | 'NL': 'Netherlands', 168 | 'NC': 'New Caledonia', 169 | 'NZ': 'New Zealand', 170 | 'NI': 'Nicaragua', 171 | 'NE': 'Niger', 172 | 'NG': 'Nigeria', 173 | 'NU': 'Niue', 174 | 'NF': 'Norfolk Island', 175 | 'MP': 'Northern Mariana Islands', 176 | 'NO': 'Norway', 177 | 'OM': 'Oman', 178 | 'PK': 'Pakistan', 179 | 'PW': 'Palau', 180 | 'PS': 'Palestinian Territory, Occupied', 181 | 'PA': 'Panama', 182 | 'PG': 'Papua New Guinea', 183 | 'PY': 'Paraguay', 184 | 'PE': 'Peru', 185 | 'PH': 'Philippines', 186 | 'PN': 'Pitcairn', 187 | 'PL': 'Poland', 188 | 'PT': 'Portugal', 189 | 'PR': 'Puerto Rico', 190 | 'QA': 'Qatar', 191 | 'RE': 'Réunion', 192 | 'RO': 'Romania', 193 | 'RU': 'Russian Federation', 194 | 'RW': 'Rwanda', 195 | 'BL': 'Saint Barthélemy', 196 | 'SH': 'Saint Helena, Ascension and Tristan da Cunha', 197 | 'KN': 'Saint Kitts and Nevis', 198 | 'LC': 'Saint Lucia', 199 | 'MF': 'Saint Martin (French part)', 200 | 'PM': 'Saint Pierre and Miquelon', 201 | 'VC': 'Saint Vincent and the Grenadines', 202 | 'WS': 'Samoa', 203 | 'SM': 'San Marino', 204 | 'ST': 'Sao Tome and Principe', 205 | 'SA': 'Saudi Arabia', 206 | 'SN': 'Senegal', 207 | 'RS': 'Serbia', 208 | 'SC': 'Seychelles', 209 | 'SL': 'Sierra Leone', 210 | 'SG': 'Singapore', 211 | 'SX': 'Sint Maarten (Dutch part)', 212 | 'SK': 'Slovakia', 213 | 'SI': 'Slovenia', 214 | 'SB': 'Solomon Islands', 215 | 'SO': 'Somalia', 216 | 'ZA': 'South Africa', 217 | 'GS': 'South Georgia and the South Sandwich Islands', 218 | 'ES': 'Spain', 219 | 'LK': 'Sri Lanka', 220 | 'SD': 'Sudan', 221 | 'SR': 'Suriname', 222 | 'SS': 'South Sudan', 223 | 'SJ': 'Svalbard and Jan Mayen', 224 | 'SZ': 'Swaziland', 225 | 'SE': 'Sweden', 226 | 'CH': 'Switzerland', 227 | 'SY': 'Syrian Arab Republic', 228 | 'TW': 'Taiwan, Province of China', 229 | 'TJ': 'Tajikistan', 230 | 'TZ': 'Tanzania, United Republic of', 231 | 'TH': 'Thailand', 232 | 'TL': 'Timor-Leste', 233 | 'TG': 'Togo', 234 | 'TK': 'Tokelau', 235 | 'TO': 'Tonga', 236 | 'TT': 'Trinidad and Tobago', 237 | 'TN': 'Tunisia', 238 | 'TR': 'Turkey', 239 | 'TM': 'Turkmenistan', 240 | 'TC': 'Turks and Caicos Islands', 241 | 'TV': 'Tuvalu', 242 | 'UG': 'Uganda', 243 | 'UA': 'Ukraine', 244 | 'AE': 'United Arab Emirates', 245 | 'GB': 'United Kingdom', 246 | 'US': 'United States', 247 | 'UM': 'United States Minor Outlying Islands', 248 | 'UY': 'Uruguay', 249 | 'UZ': 'Uzbekistan', 250 | 'VU': 'Vanuatu', 251 | 'VE': 'Venezuela, Bolivarian Republic of', 252 | 'VN': 'Viet Nam', 253 | 'VG': 'Virgin Islands, British', 254 | 'VI': 'Virgin Islands, U.S.', 255 | 'WF': 'Wallis and Futuna', 256 | 'EH': 'Western Sahara', 257 | 'YE': 'Yemen', 258 | 'ZM': 'Zambia', 259 | 'ZW': 'Zimbabwe', 260 | } 261 | 262 | parser = argparse.ArgumentParser( 263 | description='changing old country code to new country code' 264 | ) 265 | 266 | parser.add_argument( 267 | '--list-countries', 268 | '-l', 269 | action='store_true', 270 | help='List of valid countries', 271 | ) 272 | parser.add_argument('old_code', type=str, help='Old country code') 273 | parser.add_argument('new_code', type=str, help='New country code') 274 | parser.add_argument('--api-key', '-k', 
required=True, help='API Key') 275 | parser.add_argument( 276 | '--confirmed', 277 | '-c', 278 | action='store_true', 279 | help='Without this flag, the script will do a dry run without actually updating any data.', 280 | ) 281 | args = parser.parse_args() 282 | 283 | log_format = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s" 284 | if not args.confirmed: 285 | log_format = 'DRY RUN: ' + log_format 286 | logging.basicConfig(level=logging.INFO, format=log_format) 287 | logging.debug('parameters: %s' % vars(args)) 288 | 289 | args.old_code = args.old_code.upper() 290 | args.new_code = args.new_code.upper() 291 | assert args.old_code in ISO_COUNTRIES.keys(), ( 292 | '%s country code is not valid' % args.old_code 293 | ) 294 | assert args.new_code in ISO_COUNTRIES.keys(), ( 295 | '%s country code is not valid' % args.new_code 296 | ) 297 | assert args.old_code != args.new_code, 'equal country codes' 298 | 299 | logging.info( 300 | 'old country: %s (%s) -> new country: %s (%s) ' 301 | % ( 302 | args.old_code, 303 | ISO_COUNTRIES[args.old_code], 304 | args.new_code, 305 | ISO_COUNTRIES[args.new_code], 306 | ) 307 | ) 308 | 309 | api = CloseIO_API(args.api_key) 310 | has_more = True 311 | offset = 0 312 | 313 | while has_more: 314 | resp = api.get( 315 | 'lead', 316 | params={ 317 | 'query': LEADS_QUERY, 318 | '_skip': offset, 319 | '_fields': 'id,addresses', 320 | }, 321 | ) 322 | 323 | leads = resp['data'] 324 | 325 | for lead in leads: 326 | need_update = False 327 | for address in lead['addresses']: 328 | if address['country'] == args.old_code: 329 | address['country'] = args.new_code 330 | need_update = True 331 | if need_update: 332 | if args.confirmed: 333 | api.put( 334 | 'lead/' + lead['id'], data={'addresses': lead['addresses']} 335 | ) 336 | logging.info('updated %s' % lead['id']) 337 | 338 | offset += len(leads) 339 | has_more = resp['has_more'] 340 | -------------------------------------------------------------------------------- /scripts/bulk_update_leads_info.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import argparse 5 | import csv 6 | import logging 7 | import re 8 | import sys 9 | 10 | from closeio_api import Client as CloseIO_API 11 | from dateutil.parser import parse as parse_date 12 | 13 | OPPORTUNITY_FIELDS = [ 14 | 'opportunity%s_note', 15 | 'opportunity%s_value', 16 | 'opportunity%s_value_period', 17 | 'opportunity%s_confidence', 18 | 'opportunity%s_status', 19 | 'opportunity%s_date_won', 20 | ] 21 | 22 | 23 | def get_contact_info(contact_no, csv_row, what, contact_type): 24 | columns = [ 25 | x 26 | for x in csv_row.keys() 27 | if re.match(r'contact%s_%s[0-9]' % (contact_no, what), x) 28 | and csv_row[x] 29 | ] 30 | contact_info = [] 31 | for col in columns: 32 | contact_info.append({what: csv_row[col], 'type': contact_type}) 33 | return contact_info 34 | 35 | 36 | parser = argparse.ArgumentParser( 37 | formatter_class=argparse.RawDescriptionHelpFormatter, 38 | description=""" 39 | Imports leads and related data from a csv file with header. 40 | Header's columns may be declared in any order. Detects csv dialect (delimeter and quotechar). 41 | """, 42 | epilog=""" 43 | key columns: 44 | * lead_id - If exists and not empty, update using lead_id. 45 | * company - If lead_id is empty or does not exist, imports to 46 | first lead from found company. If the company was 47 | not found, loads as new lead. 
48 | * email_address - If lead_id is empty or does not exist and company is empty, imports to 49 | first lead from found email address. If the email address was 50 | not found, loads as new lead. 51 | lead columns: 52 | * url - lead url 53 | * description - lead description 54 | * status - lead status 55 | * note[0-9] - lead notes 56 | * address[0-9]_country - ISO 3166-1 alpha-2 country code 57 | * address[0-9]_city - city 58 | * address[0-9]_zipcode - zipcode 59 | * address[0-9]_label - label (business, mailing, other) 60 | * address[0-9]_state - state 61 | * address[0-9]_address_1 - text part 1 62 | * address[0-9]_address_2 - text part 2 63 | opportunity columns (new items will be added if all values filled): 64 | * opportunity[0-9]_note - opportunity note 65 | * opportunity[0-9]_value - opportunity value in cents 66 | * opportunity[0-9]_value_period - will have a value like one_time or monthly 67 | * opportunity[0-9]_confidence - opportunity confidence 68 | * opportunity[0-9]_status - opportunity status 69 | * opportunity[0-9]_date_won - opportunity date won 70 | contact columns (new contacts wil be added): 71 | * contact[0-9]_name - contact name 72 | * contact[0-9]_title - contact title 73 | * contact[0-9]_phone[0-9] - contact phones 74 | * contact[0-9]_email[0-9] - contact emails 75 | * contact[0-9]_url[0-9] - contact urls 76 | custom columns (new custom field will be created if not exists): 77 | * custom.[custom_field_name] - value of custom_field_name 78 | """, 79 | ) 80 | 81 | parser.add_argument('csvfile', type=argparse.FileType('rU'), help='csv file') 82 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 83 | parser.add_argument( 84 | '--confirmed', 85 | '-c', 86 | action='store_true', 87 | help='Without this flag, the script will do a dry run without actually updating any data.', 88 | ) 89 | parser.add_argument( 90 | '--create-custom-fields', 91 | '-f', 92 | action='store_true', 93 | help='Create new custom fields, if not exists.', 94 | ) 95 | parser.add_argument( 96 | '--disable-create', 97 | '-e', 98 | action='store_true', 99 | help='Prevent new lead creation. Update only exists leads.', 100 | ) 101 | parser.add_argument( 102 | '--continue-on-error', 103 | '-s', 104 | action='store_true', 105 | help='Do not abort import after first error', 106 | ) 107 | args = parser.parse_args() 108 | 109 | log_format = "[%(asctime)s] %(levelname)s %(message)s" 110 | if not args.confirmed: 111 | log_format = 'DRY RUN: ' + log_format 112 | logging.basicConfig(level=logging.INFO, format=log_format) 113 | logging.debug('parameters: %s' % vars(args)) 114 | 115 | sniffer = csv.Sniffer() 116 | dialect = sniffer.sniff(args.csvfile.read(1000000)) 117 | args.csvfile.seek(0) 118 | error_array = [] 119 | c = csv.DictReader(args.csvfile, dialect=dialect) 120 | 121 | unique_field = None 122 | 123 | assert any( 124 | x in ('company', 'lead_id', 'email_address') 125 | or x.startswith('unique.custom.') 126 | for x in c.fieldnames 127 | ), 'ERROR: column "company" or "lead_id" or "email_address" or a field starting with unique.custom. 
is not found' 128 | 129 | if 'lead_id' not in c.fieldnames: 130 | unique_fields = [i for i in c.fieldnames if i.startswith('unique.custom.')] 131 | if len(unique_fields) > 0: 132 | unique_field = unique_fields[0] 133 | 134 | header_row = {} 135 | for col in c.fieldnames: 136 | header_row[col] = col 137 | header_row['Validation Error'] = 'Validation Error' 138 | 139 | error_array.append(header_row) 140 | 141 | api = CloseIO_API(args.api_key) 142 | org_id = api.get('me')['organizations'][0]['id'] 143 | org = api.get('organization/' + org_id) 144 | org_name = org['name'] 145 | 146 | resp = org['lead_custom_fields'] 147 | available_custom_fieldnames = [x['name'] for x in resp] 148 | new_custom_fieldnames = [ 149 | x 150 | for x in [ 151 | y.split('.', 1)[1] for y in c.fieldnames if y.startswith('custom.') 152 | ] 153 | if x not in available_custom_fieldnames 154 | ] 155 | multi_select_fields = [x['name'] for x in resp if x['accepts_multiple_values']] 156 | 157 | if new_custom_fieldnames: 158 | if args.create_custom_fields: 159 | for field in new_custom_fieldnames: 160 | if args.confirmed: 161 | api.post( 162 | 'custom_fields/lead', data={'name': field, 'type': 'text'} 163 | ) 164 | available_custom_fieldnames.append(field) 165 | logging.info('added new custom field "%s"' % field) 166 | else: 167 | logging.error('unknown custom fieldnames: %s' % new_custom_fieldnames) 168 | sys.exit(1) 169 | 170 | logging.debug('avaliable custom fields: %s' % available_custom_fieldnames) 171 | 172 | updated_leads = 0 173 | new_leads = 0 174 | skipped_leads = 0 175 | 176 | for r in c: 177 | payload = {} 178 | 179 | # Skip all-empty rows 180 | if not any(r.values()): 181 | continue 182 | 183 | if r.get('company'): 184 | payload['name'] = r['company'] 185 | 186 | if r.get('url'): 187 | payload['url'] = r['url'] 188 | 189 | if r.get('description'): 190 | payload['description'] = r['description'] 191 | 192 | if r.get('status'): 193 | payload['status'] = r['status'] 194 | 195 | contact_indexes = [ 196 | y[len('contact')] 197 | for y in r.keys() 198 | if re.match(r'contact[0-9]_name', y) 199 | ] # extract the ordinal number for all the contacts in this row (y[7] bcos len('contact') == 7) 200 | contacts = [] 201 | for idx in contact_indexes: 202 | contact = {} 203 | if r.get('contact%s_name' % idx): 204 | contact['name'] = r['contact%s_name' % idx] 205 | if r.get('contact%s_title' % idx): 206 | contact['title'] = r['contact%s_title' % idx] 207 | phones = get_contact_info(idx, r, 'phone', 'office') 208 | if phones: 209 | contact['phones'] = phones 210 | emails = get_contact_info(idx, r, 'email', 'office') 211 | if emails: 212 | contact['emails'] = emails 213 | urls = get_contact_info(idx, r, 'url', 'url') 214 | if urls: 215 | contact['urls'] = urls 216 | if contact: 217 | contacts.append(contact) 218 | if contacts: 219 | payload['contacts'] = contacts 220 | 221 | addresses_indexes = set( 222 | [y[len('address')] for y in r.keys() if re.match(r'address[0-9]_*', y)] 223 | ) # extract the ordinal number for all the addresses in this row (y[7] bcos len('address') == 7) 224 | addresses = [] 225 | for idx in addresses_indexes: 226 | address = {} 227 | for z in [ 228 | 'country', 229 | 'city', 230 | 'zipcode', 231 | 'label', 232 | 'state', 233 | 'address_1', 234 | 'address_2', 235 | ]: 236 | if r.get('address%s_%s' % (idx, z)): 237 | address[z] = r['address%s_%s' % (idx, z)] 238 | if address: 239 | addresses.append(address) 240 | if addresses: 241 | payload['addresses'] = addresses 242 | 243 | custom_keys = [ 244 | key 245 | 
for key in r 246 | if key.startswith('custom.') 247 | and key.split('.', 1)[1] in available_custom_fieldnames 248 | and r[key] 249 | ] 250 | custom_patches = {} 251 | for key in custom_keys: 252 | if key.replace('custom.', "") in multi_select_fields: 253 | custom_patches[key] = [i.strip() for i in r[key].split(';')] 254 | 255 | else: 256 | custom_patches[key] = r[key] 257 | 258 | if custom_patches and custom_patches != {}: 259 | payload.update(custom_patches) 260 | 261 | if r.get(unique_field): 262 | payload.update( 263 | { 264 | unique_field.replace("unique.custom.", "custom."): r[ 265 | unique_field 266 | ] 267 | } 268 | ) 269 | 270 | try: 271 | lead = None 272 | if r.get('lead_id') is not None: 273 | # exists lead 274 | resp = api.get('lead/%s' % r['lead_id']) 275 | logging.debug('received: %s' % resp) 276 | lead = resp 277 | 278 | elif r.get(unique_field) is not None: 279 | field = unique_field.replace("unique.custom.", "custom.") 280 | resp = api.get( 281 | 'lead', 282 | params={ 283 | 'query': '"%s":"%s" sort:created' 284 | % (field, r[unique_field]), 285 | '_fields': 'id,display_name,name,contacts,custom', 286 | 'limit': 1, 287 | }, 288 | ) 289 | logging.debug('received: %s' % resp) 290 | if resp['total_results']: 291 | lead = resp['data'][0] 292 | 293 | elif r.get('email_address') is not None: 294 | resp = api.get( 295 | 'lead', 296 | params={ 297 | 'query': 'email_address:"%s" sort:created' 298 | % r['email_address'], 299 | '_fields': 'id,display_name,name,contacts,custom', 300 | 'limit': 1, 301 | }, 302 | ) 303 | logging.debug('received: %s' % resp) 304 | if resp['total_results']: 305 | lead = resp['data'][0] 306 | 307 | else: 308 | # first lead in the company 309 | resp = api.get( 310 | 'lead', 311 | params={ 312 | 'query': 'company:"%s" sort:created' % r['company'], 313 | '_fields': 'id,display_name,name,contacts,custom', 314 | 'limit': 1, 315 | }, 316 | ) 317 | logging.debug('received: %s' % resp) 318 | if resp['total_results']: 319 | lead = resp['data'][0] 320 | 321 | if lead: 322 | logging.debug('to sent: %s' % payload) 323 | if args.confirmed: 324 | if len(multi_select_fields) > 0 and lead.get('custom'): 325 | for key in multi_select_fields: 326 | if payload.get('custom.' + key) and lead['custom'].get( 327 | key 328 | ): 329 | payload['custom.' + key] = ( 330 | lead['custom'][key] + payload['custom.' 
+ key] 331 | ) 332 | api.put('lead/' + lead['id'], data=payload) 333 | logging.info( 334 | 'line %d updated: %s %s' 335 | % ( 336 | c.line_num, 337 | lead['id'], 338 | lead.get('name') if lead.get('name') else '', 339 | ) 340 | ) 341 | updated_leads += 1 342 | # new lead 343 | elif lead is None and not args.disable_create: 344 | logging.debug('to sent: %s' % payload) 345 | if args.confirmed: 346 | lead = api.post('lead', data=payload) 347 | logging.info( 348 | 'line %d new: %s %s' 349 | % ( 350 | c.line_num, 351 | lead['id'] if args.confirmed else 'X', 352 | lead['display_name'], 353 | ) 354 | ) 355 | else: 356 | logging.info( 357 | 'line %d new lead for: %s' 358 | % ( 359 | c.line_num, 360 | r['company'] 361 | if r.get('company') 362 | else r.get('email_address') or r.get(unique_field), 363 | ) 364 | ) 365 | new_leads += 1 366 | 367 | elif lead is None and args.disable_create: 368 | r['Validation Error'] = 'Lead does not exist in Close' 369 | skipped_leads += 1 370 | logging.info( 371 | 'line %d skipped: %s does not exist in Close' 372 | % ( 373 | c.line_num, 374 | r['company'] 375 | if r.get('company') 376 | else r.get('email_address') or r.get(unique_field), 377 | ) 378 | ) 379 | error_array.append(r) 380 | continue 381 | 382 | notes = [r[x] for x in r.keys() if re.match(r'note[0-9]', x) and r[x]] 383 | for note in notes: 384 | if args.confirmed: 385 | resp = api.post( 386 | 'activity/note', data={'note': note, 'lead_id': lead['id']} 387 | ) 388 | logging.debug( 389 | '%s new note: %s' 390 | % (lead['id'] if args.confirmed else 'X', note.decode('utf-8')) 391 | ) 392 | 393 | opportunity_ids = { 394 | x[len('opportunity')] 395 | for x in c.fieldnames 396 | if re.match(r'opportunity[0-9]', x) 397 | } 398 | for i in opportunity_ids: 399 | opp_payload = None 400 | if any([r.get(x % i) for x in OPPORTUNITY_FIELDS]): 401 | if r['opportunity%s_value_period' % i] not in ( 402 | 'one_time', 403 | 'monthly', 404 | ): 405 | logging.error( 406 | 'line %d invalid value_period "%s" for lead %d' 407 | % (c.line_num, r['opportunity%s_value_period' % i], i) 408 | ) 409 | continue 410 | 411 | opp_payload = { 412 | 'lead_id': lead['id'], 413 | 'note': r.get('opportunity%s_note' % i), 414 | # 'value': int(float(re.sub(r'[^\d.]', '', r['opportunity%s_value' % i])) * 100), # converts $1,000.42 into 100042 415 | 'value': int(r['opportunity%s_value' % i]) 416 | if r.get('opportunity%s_value' % i) 417 | else None, # assumes cents are given 418 | 'value_period': r.get('opportunity%s_value_period' % i), 419 | 'confidence': int(r['opportunity%s_confidence' % i]) 420 | if r.get('opportunity%s' % i) 421 | else None, 422 | 'status': r.get('opportunity%s_status' % i), 423 | 'date_won': str( 424 | parse_date(r['opportunity%s_date_won' % i]) 425 | ) 426 | if r.get('opportunity%s_date_won' % i) 427 | else None 428 | # 'date_won': str(parse_date(r['opportunity%s_date_won' % i])) if 'opportunity%s_date_won' % i in r else None 429 | # 'date_won': str(datetime.datetime.strptime(r['opportunity%s_date_won' % i], '%d/%m/%y')), 430 | } 431 | if args.confirmed: 432 | api.post('opportunity', data=opp_payload) 433 | else: 434 | logging.error( 435 | 'line %d is not a fully filled opportunity %s, skipped' 436 | % (c.line_num, i) 437 | ) 438 | 439 | except Exception as e: 440 | logging.error('line %d skipped with error %s' % (c.line_num, e)) 441 | skipped_leads += 1 442 | r['Validation Error'] = e 443 | error_array.append(r) 444 | if not args.continue_on_error: 445 | logging.info('stopped on error') 446 | sys.exit(1) 447 | 448 | 
logging.info( 449 | 'summary: updated[%d], new[%d], skipped[%d]' 450 | % (updated_leads, new_leads, skipped_leads) 451 | ) 452 | 453 | if len(error_array) > 1: 454 | f = open( 455 | f'{org_name} Bulk Update Errored Rows.csv', 'wt', encoding='utf-8' 456 | ) 457 | try: 458 | keys = error_array[0].keys() 459 | ordered_keys = ['Validation Error'] + c.fieldnames 460 | writer = csv.DictWriter(f, ordered_keys) 461 | writer.writerows(error_array) 462 | finally: 463 | f.close() 464 | -------------------------------------------------------------------------------- /scripts/change_sequence_sender.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from closeio_api import APIError, Client as CloseIO_API 4 | 5 | parser = argparse.ArgumentParser( 6 | description='Change sequence sender for specific user' 7 | ) 8 | 9 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 10 | parser.add_argument( 11 | '--from-email', 12 | '-f', 13 | required=True, 14 | help='Current email address being used to send sequence', 15 | ) 16 | parser.add_argument( 17 | '--to-email', 18 | '-t', 19 | required=True, 20 | help='Email address you want to use to send sequence', 21 | ) 22 | parser.add_argument( 23 | '--sender-account-id', 24 | '-s', 25 | required=True, 26 | help='Email account id you want to use to send sequence', 27 | ) 28 | parser.add_argument( 29 | '--sender-name', 30 | '-n', 31 | required=True, 32 | help='Sender name you want to use to send sequence', 33 | ) 34 | 35 | args = parser.parse_args() 36 | api = CloseIO_API(args.api_key) 37 | 38 | from_subs = [] 39 | 40 | print("Getting sequences") 41 | sequences = [] 42 | has_more = True 43 | offset = 0 44 | while has_more: 45 | resp = api.get( 46 | 'sequence', 47 | params={'_skip': offset}, 48 | ) 49 | sequences.extend(resp['data']) 50 | offset += len(resp['data']) 51 | has_more = resp['has_more'] 52 | 53 | for sequence in sequences: 54 | print(f"Getting sequence subscriptions for `{sequence['name']}`") 55 | has_more = True 56 | offset = 0 57 | while has_more: 58 | sub_results = api.get( 59 | 'sequence_subscription', 60 | params={'_skip': offset, 'sequence_id': sequence['id']}, 61 | ) 62 | from_subs += [ 63 | i 64 | for i in sub_results['data'] 65 | if i['sender_email'] == args.from_email 66 | and i['status'] in ['active', 'paused', 'error', 'goal'] 67 | ] 68 | offset += len(sub_results['data']) 69 | has_more = sub_results['has_more'] 70 | print(offset) 71 | 72 | print(f"Total subscriptions: {len(from_subs)}") 73 | print("Updating subscriptions") 74 | 75 | count = 0 76 | for sub in from_subs: 77 | try: 78 | api.put( 79 | f"sequence_subscription/{sub['id']}", 80 | data={ 81 | 'sender_name': args.sender_name, 82 | 'sender_account_id': args.sender_account_id, 83 | 'sender_email': args.to_email, 84 | }, 85 | ) 86 | count += 1 87 | print(f"{count}: {sub['id']}") 88 | except APIError as e: 89 | print(f"Can't update sequence {sub['id']} because {str(e)}") 90 | -------------------------------------------------------------------------------- /scripts/clone_organization.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from closeio_api import APIError 4 | 5 | from scripts.CloseApiWrapper import CloseApiWrapper 6 | 7 | arg_parser = argparse.ArgumentParser( 8 | description="Clone one organization to another" 9 | ) 10 | arg_parser.add_argument( 11 | "--from-api-key", 12 | "-f", 13 | required=True, 14 | help="API Key for source organization", 
15 | ) 16 | arg_parser.add_argument( 17 | "--to-api-key", 18 | "-t", 19 | required=True, 20 | help="API Key for destination organization", 21 | ) 22 | arg_parser.add_argument( 23 | "--statuses", 24 | action="store_true", 25 | help="Copy lead & opportunity statuses", 26 | ) 27 | arg_parser.add_argument( 28 | "--lead-statuses", 29 | action="store_true", 30 | help="Copy lead statuses", 31 | ) 32 | arg_parser.add_argument( 33 | "--opportunity-statuses", 34 | action="store_true", 35 | help="Copy opportunity statuses", 36 | ) 37 | arg_parser.add_argument( 38 | "--custom-fields", 39 | action="store_true", 40 | help="Copy lead, contact, and opportunity custom fields", 41 | ) 42 | arg_parser.add_argument( 43 | "--lead-custom-fields", 44 | action="store_true", 45 | help="Copy lead custom fields", 46 | ) 47 | arg_parser.add_argument( 48 | "--opportunity-custom-fields", 49 | action="store_true", 50 | help="Copy opportunity custom fields", 51 | ) 52 | arg_parser.add_argument( 53 | "--contact-custom-fields", 54 | action="store_true", 55 | help="Copy contact custom fields", 56 | ) 57 | arg_parser.add_argument( 58 | "--custom-activities", 59 | action="store_true", 60 | help="Copy custom activities", 61 | ) 62 | arg_parser.add_argument( 63 | "--custom-objects", 64 | action="store_true", 65 | help="Copy custom objects", 66 | ) 67 | arg_parser.add_argument( 68 | "--smart-views", action="store_true", help="Copy smart views" 69 | ) 70 | arg_parser.add_argument( 71 | "--templates", action="store_true", help="Copy email & SMS templates" 72 | ) 73 | arg_parser.add_argument( 74 | "--email-templates", action="store_true", help="Copy email templates" 75 | ) 76 | arg_parser.add_argument( 77 | "--sms-templates", action="store_true", help="Copy SMS templates" 78 | ) 79 | arg_parser.add_argument( 80 | "--sequences", "--workflows", action="store_true", help="Copy workflows (excluding any that contain non-Email or non-SMS steps)" 81 | ) 82 | arg_parser.add_argument( 83 | "--integration-links", 84 | action="store_true", 85 | help="Copy integration links", 86 | ) 87 | arg_parser.add_argument("--roles", action="store_true", help="Copy roles") 88 | arg_parser.add_argument( 89 | "--webhooks", action="store_true", help="Copy webhooks" 90 | ) 91 | arg_parser.add_argument( 92 | "--groups", action="store_true", help="Copy groups without members." 93 | ) 94 | arg_parser.add_argument( 95 | "--groups-with-members", action="store_true", help="Copy groups including members. Any member that hasn't been " 96 | "added to the destination organization will be skipped." 97 | ) 98 | arg_parser.add_argument( 99 | "--all", "-a", action="store_true", help="Copy all settings" 100 | ) 101 | args = arg_parser.parse_args() 102 | 103 | from_api = CloseApiWrapper(args.from_api_key) 104 | to_api = CloseApiWrapper(args.to_api_key) 105 | 106 | from_organization = from_api.get("me")["organizations"][0] 107 | to_organization = to_api.get("me")["organizations"][0] 108 | 109 | message = f"Cloning `{from_organization['name']}` ({from_organization['id']}) organization to `{to_organization['name']}` ({to_organization['id']})..." 110 | message += '\nData from source organization will be added to the destination organization. No data will be deleted.\n\nContinue?' 
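# Each copy block below follows the same basic pattern: GET the objects from
# the source organization, drop the source `id` (and `organization_id` where
# present), remap role/status references to the destination organization by
# name where needed, and POST the result to the destination organization.
# A rough sketch of one such block (lead statuses, mirroring the real code
# further down):
#
#   for status in from_api.get_lead_statuses():
#       del status["id"]
#       to_api.post("status/lead", data=status)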
111 | 112 | confirmed = input(f"{message} (y/n)\n") 113 | if confirmed not in ["yes", "y"]: 114 | exit() 115 | 116 | # Role mapping 117 | from_roles = from_api.get('role')['data'] 118 | to_roles = to_api.get('role')['data'] 119 | 120 | 121 | def replace_old_role_ids_with_new(editable_with_roles): 122 | new_roles = [] 123 | for old_role_id in editable_with_roles: 124 | if old_role_id.startswith('role_'): 125 | old_role = next( 126 | (x for x in from_roles if x['id'] == old_role_id), 127 | None, 128 | ) 129 | if not old_role: 130 | print(f"Couldn't find role with ID `{old_role_id}`") 131 | continue 132 | 133 | new_role = next( 134 | (x for x in to_roles if x['name'] == old_role['name']), 135 | None, 136 | ) 137 | if new_role: 138 | new_roles.append(new_role['id']) 139 | else: 140 | # Built-in roles such as `admin` 141 | new_roles.append(old_role_id) 142 | 143 | return new_roles 144 | 145 | 146 | if args.lead_statuses or args.statuses or args.all: 147 | print("\nCopying Lead Statuses") 148 | 149 | from_lead_statuses = from_api.get_lead_statuses() 150 | for status in from_lead_statuses: 151 | del status["id"] 152 | 153 | try: 154 | to_api.post("status/lead", data=status) 155 | print(f'Added lead status `{status["label"]}`') 156 | except APIError as e: 157 | print(f"Couldn't add `{status['label']}` because {str(e)}") 158 | 159 | if args.opportunity_statuses or args.statuses or args.all: 160 | print("\nCopying Opportunity Statuses") 161 | to_pipelines = to_api.get_opportunity_pipelines() 162 | from_pipelines = from_api.get_opportunity_pipelines() 163 | 164 | for from_pipeline in from_pipelines: 165 | # Try to find an existing pipeline by name 166 | to_pipeline = next( 167 | (x for x in to_pipelines if x["name"] == from_pipeline["name"]), 168 | None, 169 | ) 170 | 171 | if not to_pipeline: 172 | # If the pipeline doesn't exist, create the pipeline alongside the statuses 173 | del from_pipeline["id"] 174 | del from_pipeline["organization_id"] 175 | 176 | try: 177 | to_pipeline = to_api.post("pipeline", data=from_pipeline) 178 | print(f'Added `{from_pipeline["name"]}` and its statuses') 179 | except APIError as e: 180 | print( 181 | f"Couldn't add `{from_pipeline['name']}` because {str(e)}" 182 | ) 183 | continue 184 | else: 185 | # Otherwise append the statuses to an existing pipeline 186 | for opp_status in from_pipeline["statuses"]: 187 | opp_status["pipeline_id"] = to_pipeline["id"] 188 | del opp_status["id"] 189 | 190 | try: 191 | to_api.post("status/opportunity", data=opp_status) 192 | print(f'Added opportunity status `{opp_status["label"]}`') 193 | except APIError as e: 194 | print( 195 | f"Couldn't add `{opp_status['label']}` because {str(e)}" 196 | ) 197 | 198 | if args.custom_objects or args.all: 199 | print("\nCopying Custom Objects") 200 | 201 | from_custom_object_types = from_api.get("custom_object_type")["data"] 202 | 203 | # Get the existing shared custom fields in case the new org already has them 204 | to_shared_custom_fields = to_api.get_all_items('custom_field/shared') 205 | 206 | from_custom_object_fields = from_api.get( 207 | 'custom_field/custom_object_type' 208 | )['data'] + from_api.get_all_items('custom_field/shared') 209 | 210 | old_to_new_object_map = {} 211 | 212 | # create all objects first 213 | for object_type in from_custom_object_types: 214 | object_type['editable_with_roles'] = replace_old_role_ids_with_new(object_type['editable_with_roles']) 215 | 216 | try: 217 | del object_type["organization_id"] 218 | new_object_type = to_api.post( 219 | "custom_object_type", 
data=object_type 220 | ) 221 | old_to_new_object_map[object_type['id']] = new_object_type['id'] 222 | print(f"Added `{object_type['name']}` custom object") 223 | except APIError as e: 224 | print( 225 | f"Couldn't add `{object_type['name']}` custom object because {str(e)}" 226 | ) 227 | continue 228 | 229 | for object_type in from_custom_object_types: 230 | for field in object_type["fields"]: 231 | # Get the object directly because some fields like `choices` aren't exposed in activity type `fields` array 232 | from_field = next( 233 | (x for x in from_custom_object_fields if x["id"] == field["id"]), 234 | None, 235 | ) 236 | from_field.pop('organization_id', None) 237 | 238 | if field['referenced_custom_type_id']: 239 | from_field['referenced_custom_type_id'] = old_to_new_object_map.get(field['referenced_custom_type_id'], 240 | None) 241 | if field["is_shared"]: 242 | to_field = next( 243 | ( 244 | x 245 | for x in to_shared_custom_fields 246 | if x['name'] == field['name'] 247 | ), 248 | None, 249 | ) 250 | 251 | if not to_field: 252 | # Create new shared field because it doesn't exist yet 253 | try: 254 | # Delete `associations` field as that references old (source) activities 255 | del from_field['associations'] 256 | 257 | to_field = to_api.post( 258 | f"custom_field/shared/", 259 | data=from_field, 260 | ) 261 | to_shared_custom_fields.append(to_field) 262 | print(f"Added `{field['name']}` shared field") 263 | except APIError as e: 264 | print( 265 | f"Couldn't add `{field['name']}` shared field because {str(e)}" 266 | ) 267 | continue 268 | 269 | try: 270 | to_api.post( 271 | f"custom_field/shared/{to_field['id']}/association", 272 | data={ 273 | 'object_type': 'custom_object_type', 274 | "custom_object_type_id": old_to_new_object_map.get(object_type['id'], None), 275 | "required": field['required'], 276 | 'editable_with_roles': replace_old_role_ids_with_new(field['editable_with_roles']), 277 | }, 278 | ) 279 | except APIError as e: 280 | print( 281 | f"Couldn't add `{field['name']}` associations because {str(e)}" 282 | ) 283 | else: 284 | # Non-shared (regular) field, just create it 285 | from_field['editable_with_roles'] = replace_old_role_ids_with_new(from_field['editable_with_roles']) 286 | from_field["custom_object_type_id"] = old_to_new_object_map.get(object_type['id'], None) 287 | 288 | try: 289 | to_api.post("custom_field/custom_object_type", data=from_field) 290 | print(f'Added `{field["name"]}` custom field') 291 | except APIError as e: 292 | print(from_field) 293 | print( 294 | f"Couldn't add `{field['name']}` custom field because {str(e)}" 295 | ) 296 | 297 | 298 | def copy_custom_fields(custom_field_type): 299 | # Get the existing shared custom fields in case the new org already has them 300 | to_shared_custom_fields = to_api.get_all_items('custom_field/shared') 301 | 302 | from_custom_fields = from_api.get( 303 | f"custom_field_schema/{custom_field_type}" 304 | )["fields"] 305 | 306 | for from_cf in from_custom_fields: 307 | del from_cf["id"] 308 | del from_cf["organization_id"] 309 | if from_cf['referenced_custom_type_id']: 310 | to_objects = to_api.get('custom_object_type')['data'] 311 | from_object_name = from_api.get(f'custom_object_type/{from_cf["referenced_custom_type_id"]}').get('name') 312 | to_object = next( 313 | (x for x in to_objects if x["name"] == from_object_name), 314 | None, 315 | ) 316 | if to_object: 317 | from_cf['referenced_custom_type_id'] = to_object['id'] 318 | else: 319 | continue 320 | 321 | try: 322 | if from_cf['is_shared']: 323 | to_cf = 
next( 324 | ( 325 | x 326 | for x in to_shared_custom_fields 327 | if x['name'] == from_cf['name'] 328 | ), 329 | None, 330 | ) 331 | 332 | if not to_cf: 333 | to_cf = to_api.post(f"custom_field/shared", data=from_cf) 334 | print(f'Created `{from_cf["name"]}` shared custom field') 335 | 336 | # Only add association to a custom field type that's being copied. 337 | # 338 | # For example, if you have a shared field for leads and contacts, and you're copying only lead custom fields, 339 | # we would add only `lead` association to that shared field. 340 | association = next(x for x in from_cf['associations'] if x['object_type'] == custom_field_type) 341 | to_api.post( 342 | f"custom_field/shared/{to_cf['id']}/association", 343 | data={ 344 | 'object_type': custom_field_type, 345 | 'editable_with_roles': replace_old_role_ids_with_new(association['editable_with_roles']), 346 | 'required': association['required'] 347 | }, 348 | ) 349 | print( 350 | f"Added `{custom_field_type}` association to shared `{from_cf['name']}` custom field" 351 | ) 352 | else: 353 | # Swap old role IDs with new role IDs (if any) 354 | from_cf['editable_with_roles'] = replace_old_role_ids_with_new(from_cf['editable_with_roles']) 355 | 356 | to_api.post(f"custom_field/{custom_field_type}", data=from_cf) 357 | print( 358 | f'Created `{from_cf["name"]}` {custom_field_type} custom field' 359 | ) 360 | except APIError as e: 361 | print(f"Couldn't add `{from_cf['name']}` because {str(e)}") 362 | 363 | 364 | if args.lead_custom_fields or args.custom_fields or args.all: 365 | print("\nCopying Lead Custom Fields") 366 | copy_custom_fields('lead') 367 | 368 | if args.opportunity_custom_fields or args.custom_fields or args.all: 369 | print("\nCopying Opportunity Custom Fields") 370 | copy_custom_fields('opportunity') 371 | 372 | if args.contact_custom_fields or args.custom_fields or args.all: 373 | print("\nCopying Contact Custom Fields") 374 | copy_custom_fields('contact') 375 | 376 | if args.integration_links or args.all: 377 | print("\nCopying Integration Links") 378 | integration_links = from_api.get_all_items('integration_link') 379 | for link in integration_links: 380 | del link["id"] 381 | del link["organization_id"] 382 | 383 | try: 384 | to_api.post("integration_link", data=link) 385 | print(f'Added `{link["name"]}`') 386 | except APIError as e: 387 | print(f"Couldn't add `{link['name']}` because {str(e)}") 388 | 389 | if args.roles or args.all: 390 | BUILT_IN_ROLES = [ 391 | "Admin", 392 | "Restricted User", 393 | "Super User", 394 | "User", 395 | ] 396 | 397 | print("\nCopying Roles") 398 | roles = from_api.get_all_items('role') 399 | for role in roles: 400 | if role["name"] in BUILT_IN_ROLES: 401 | continue 402 | 403 | del role["id"] 404 | del role["organization_id"] 405 | 406 | try: 407 | to_api.post("role", data=role) 408 | print(f'Added `{role["name"]}`') 409 | except APIError as e: 410 | print(f"Couldn't add `{role['name']}` because {str(e)}") 411 | 412 | if args.templates or args.email_templates or args.all: 413 | print("\nCopying Email Templates") 414 | templates = from_api.get_all_items('email_template') 415 | for template in templates: 416 | del template["id"] 417 | del template["organization_id"] 418 | 419 | try: 420 | to_api.post("email_template", data=template) 421 | print(f'Added `{template["name"]}`') 422 | except APIError as e: 423 | print(f"Couldn't add `{template['name']}` because {str(e)}") 424 | 425 | if args.templates or args.sms_templates or args.all: 426 | print("\nCopying SMS Templates") 427 | 
templates = from_api.get_all_items('sms_template') 428 | for template in templates: 429 | del template["id"] 430 | del template["organization_id"] 431 | 432 | try: 433 | to_api.post("sms_template", data=template) 434 | print(f'Added `{template["name"]}`') 435 | except APIError as e: 436 | print(f"Couldn't add `{template['name']}` because {str(e)}") 437 | 438 | # Assumes all the workflow steps (templates) were already transferred over 439 | if args.sequences or args.all: 440 | print("\nCopying Workflows") 441 | 442 | to_email_templates = to_api.get_all_items('email_template') 443 | to_sms_templates = to_api.get_all_items('sms_template') 444 | from_workflows = from_api.get_all_items('sequence') 445 | for workflow in from_workflows: 446 | steps = workflow["steps"] 447 | if [x for x in steps if x['step_type'] not in ['email', 'sms']]: 448 | print(f'Skipping `{workflow["name"]}` because it contains non-Email or non-SMS steps') 449 | continue 450 | 451 | del workflow["id"] 452 | del workflow["organization_id"] 453 | for step in steps: 454 | del step["id"] 455 | 456 | # Replace Email Template ID (if it exists ie. it's an Email step) 457 | if step.get('email_template_id'): 458 | from_template = from_api.get( 459 | f"email_template/{step['email_template_id']}", 460 | params={'_fields': 'name'}, 461 | ) 462 | for template in to_email_templates: 463 | if ( 464 | template["name"] == from_template["name"] 465 | and template["is_shared"] 466 | ): 467 | step["email_template_id"] = template["id"] 468 | 469 | # Replace SMS Template ID (if it exists ie. it's a SMS step) 470 | if step.get('sms_template_id'): 471 | from_template = from_api.get( 472 | f"sms_template/{step['sms_template_id']}", 473 | params={'_fields': 'name'}, 474 | ) 475 | for template in to_sms_templates: 476 | if ( 477 | template["name"] == from_template["name"] 478 | and template["is_shared"] 479 | ): 480 | step["sms_template_id"] = template["id"] 481 | 482 | try: 483 | to_api.post("sequence", data=workflow) 484 | print(f'Added `{workflow["name"]}`') 485 | except APIError as e: 486 | print(f"Couldn't add `{workflow['name']}` because {str(e)}") 487 | 488 | if args.custom_activities or args.all: 489 | print("\nCopying Custom Activities") 490 | 491 | # Fetch both shared and non-shared activity custom fields 492 | from_custom_fields = from_api.get_all_items( 493 | 'custom_field/activity' 494 | ) + from_api.get_all_items('custom_field/shared') 495 | 496 | # Get the existing shared custom fields in case the new org already has them 497 | to_shared_custom_fields = to_api.get_all_items('custom_field/shared') 498 | 499 | custom_activity_types = from_api.get("custom_activity")["data"] 500 | for activity_type in custom_activity_types: 501 | # Re-map old role IDs to new role IDs (by name) 502 | activity_type['editable_with_roles'] = replace_old_role_ids_with_new(activity_type['editable_with_roles']) 503 | 504 | try: 505 | del activity_type["organization_id"] 506 | new_activity_type = to_api.post( 507 | "custom_activity", data=activity_type 508 | ) 509 | print(f"Added `{activity_type['name']}` custom activity") 510 | except APIError as e: 511 | print( 512 | f"Couldn't add `{activity_type['name']}` custom activity because {str(e)}" 513 | ) 514 | continue 515 | 516 | for field in activity_type["fields"]: 517 | # Get the object directly because some fields like `choices` aren't exposed in activity type `fields` array 518 | from_field = next( 519 | (x for x in from_custom_fields if x["id"] == field["id"]), 520 | None, 521 | ) 522 | 
from_field.pop('organization_id', None) 523 | if from_field['referenced_custom_type_id']: 524 | to_objects = to_api.get('custom_object_type')['data'] 525 | from_object_name = from_api.get(f'custom_object_type/{from_field["referenced_custom_type_id"]}').get('name') 526 | to_object = next( 527 | (x for x in to_objects if x["name"] == from_object_name), 528 | None, 529 | ) 530 | if to_object: 531 | from_field['referenced_custom_type_id'] = to_object['id'] 532 | else: 533 | continue 534 | if field["is_shared"]: 535 | to_field = next( 536 | ( 537 | x 538 | for x in to_shared_custom_fields 539 | if x['name'] == field['name'] 540 | ), 541 | None, 542 | ) 543 | 544 | if not to_field: 545 | # Create new shared field because it doesn't exist yet 546 | try: 547 | # Delete `associations` field as that references old (source) activities 548 | del from_field['associations'] 549 | 550 | to_field = to_api.post( 551 | f"custom_field/shared/", 552 | data=from_field, 553 | ) 554 | to_shared_custom_fields.append(to_field) 555 | print(f"Added `{field['name']}` shared field") 556 | except APIError as e: 557 | print( 558 | f"Couldn't add `{field['name']}` shared field because {str(e)}" 559 | ) 560 | continue 561 | 562 | to_api.post( 563 | f"custom_field/shared/{to_field['id']}/association", 564 | data={ 565 | 'object_type': 'custom_activity_type', 566 | "custom_activity_type_id": new_activity_type["id"], 567 | "required": field['required'], 568 | 'editable_with_roles': replace_old_role_ids_with_new(field['editable_with_roles']), 569 | }, 570 | ) 571 | else: 572 | # Non-shared (regular) field, just create it 573 | from_field['editable_with_roles'] = replace_old_role_ids_with_new(from_field['editable_with_roles']) 574 | from_field["custom_activity_type_id"] = new_activity_type["id"] 575 | to_api.post("custom_field/activity/", data=from_field) 576 | 577 | if args.groups or args.groups_with_members or args.all: 578 | print("\nCopying Groups") 579 | groups = from_api.get('group')['data'] 580 | for group in groups: 581 | group = from_api.get(f'group/{group["id"]}', params={'_fields': 'name,members'}) 582 | 583 | try: 584 | new_group = to_api.post('group', data={'name': group['name']}) 585 | 586 | if args.groups_with_members: 587 | for member in group['members']: 588 | try: 589 | to_api.post(f'group/{new_group["id"]}/member', data={'user_id': member['user_id']}) 590 | except APIError as e: 591 | if 'Invalid organization members' in str(e): 592 | pass 593 | 594 | print(f'Added `{group["name"]}`') 595 | except APIError as e: 596 | print(f"Couldn't add `{group['name']}` because {str(e)}") 597 | 598 | if args.smart_views or args.all: 599 | 600 | def structured_replace(value, replacement_dictionary): 601 | ''' 602 | Recursively replace values in a dictionary with values from a replacement dictionary. 603 | This is used to replace IDs in source Smart Views with the new IDs in the destination account. 604 | 605 | IDs can be lead status IDs, opportunity status IDs, email templates, workflows, custom fields, etc. - pretty 606 | much anything apart from Smart View IDs which are handled separately as they are not known until the Smart View 607 | is created. 
608 | ''' 609 | if type(value) == list: 610 | return [structured_replace(item, replacement_dictionary) for item in value] 611 | 612 | if type(value) == dict: 613 | return { 614 | key: structured_replace(value, replacement_dictionary) 615 | for key, value in value.items() 616 | } 617 | 618 | return replacement_dictionary.get(value, value) 619 | 620 | 621 | def textual_replace(value, replacement_dictionary): 622 | ''' 623 | Simple global & replace of IDs in source Smart Views with the new IDs in the destination account. 624 | Used only for deprecated (textual) queries. 625 | ''' 626 | for from_id, to_id in replacement_dictionary.items(): 627 | value = value.replace(from_id, to_id) 628 | 629 | return value 630 | 631 | 632 | def get_id_mappings(): 633 | map_from_to_id = {} 634 | 635 | # Custom Activity Types 636 | from_custom_activities = from_api.get("custom_activity")["data"] 637 | to_custom_activities = to_api.get("custom_activity")["data"] 638 | for from_ca in from_custom_activities: 639 | to_ca = next( 640 | (x for x in to_custom_activities if x['name'] == from_ca['name']), 641 | None, 642 | ) 643 | if to_ca: 644 | map_from_to_id[from_ca['id']] = to_ca['id'] 645 | 646 | # Custom fields 647 | def get_custom_fields(api): 648 | BUILT_IN_SCHEMES = [ 649 | 'lead', 650 | 'contact', 651 | 'opportunity', 652 | ] 653 | custom_activity_type_ids = [ 654 | x['id'] for x in api.get("custom_activity")["data"] 655 | ] 656 | 657 | custom_fields = [] 658 | for schema in BUILT_IN_SCHEMES + custom_activity_type_ids: 659 | if schema.startswith('actitype_'): 660 | schema_fields = api.get_custom_fields(f"activity/{schema}") 661 | else: 662 | schema_fields = api.get_custom_fields(schema) 663 | 664 | # Add `object_type` field so we can use it to match/map IDs later on in case there are 2 custom fields 665 | # with the same name - one Lead Custom Field, and another Custom Activity Custom Field 666 | schema_fields = [ 667 | {**x, **{'object_type': schema}} for x in schema_fields 668 | ] 669 | custom_fields.extend(schema_fields) 670 | 671 | return custom_fields 672 | 673 | from_custom_fields = get_custom_fields(from_api) 674 | to_custom_fields = get_custom_fields(to_api) 675 | for from_cf in from_custom_fields: 676 | to_cf = next( 677 | ( 678 | x 679 | for x in to_custom_fields 680 | if x['name'] == from_cf['name'] 681 | and ( 682 | x['object_type'] == from_cf['object_type'] 683 | or x['object_type'] 684 | == map_from_to_id.get(from_cf['object_type']) 685 | ) 686 | ), 687 | None, 688 | ) 689 | if to_cf: 690 | map_from_to_id[from_cf['id']] = to_cf['id'] 691 | 692 | # Lead & opportunity statuses 693 | from_statuses = ( 694 | from_api.get_lead_statuses() + from_api.get_opportunity_statuses() 695 | ) 696 | to_statuses = ( 697 | to_api.get_lead_statuses() + to_api.get_opportunity_statuses() 698 | ) 699 | for from_status in from_statuses: 700 | to_status = next( 701 | (x for x in to_statuses if x['label'] == from_status['label']), 702 | None, 703 | ) 704 | if to_status: 705 | map_from_to_id[from_status['id']] = to_status['id'] 706 | 707 | # Email templates 708 | from_templates = from_api.get_all_items('email_template') 709 | to_templates = to_api.get_all_items('email_template') 710 | for from_template in from_templates: 711 | to_template = next( 712 | (x for x in to_templates if x['name'] == from_template['name']), 713 | None, 714 | ) 715 | if to_template: 716 | map_from_to_id[from_template['id']] = to_template['id'] 717 | 718 | # SMS templates 719 | from_templates = from_api.get_all_items('sms_template') 720 | 
to_templates = to_api.get_all_items('sms_template') 721 | for from_template in from_templates: 722 | to_template = next( 723 | (x for x in to_templates if x['name'] == from_template['name']), 724 | None, 725 | ) 726 | if to_template: 727 | map_from_to_id[from_template['id']] = to_template['id'] 728 | 729 | # Workflows 730 | from_workflows = from_api.get_all_items('sequence') 731 | to_workflows = to_api.get_all_items('sequence') 732 | for from_workflow in from_workflows: 733 | to_workflow = next( 734 | (x for x in to_workflows if x['name'] == from_workflow['name']), 735 | None, 736 | ) 737 | if to_workflow: 738 | map_from_to_id[from_workflow['id']] = to_workflow['id'] 739 | 740 | # Groups 741 | from_groups = from_api.get('group', params={'_fields': 'id,name'})['data'] 742 | to_groups = to_api.get('group', params={'_fields': 'id,name'})['data'] 743 | for from_group in from_groups: 744 | to_group = next( 745 | (x for x in to_groups if x['name'] == from_group['name']), 746 | None, 747 | ) 748 | if to_group: 749 | map_from_to_id[from_group['id']] = to_group['id'] 750 | 751 | return map_from_to_id 752 | 753 | 754 | def get_smartviews(api): 755 | smart_views = [] 756 | 757 | smart_views_ordered = api.get_all_items("saved_search", params={"_fields": "id", "type__in": "lead,contact"}) 758 | for smart_view in smart_views_ordered: 759 | detailed_smart_view = api.get(f'saved_search/{smart_view["id"]}') 760 | smart_views.append(detailed_smart_view) 761 | 762 | return smart_views 763 | 764 | print("\nCopying Smart Views") 765 | from_smart_views = get_smartviews(from_api) 766 | 767 | # Filter out Smart Views that already exist (by name) 768 | to_smart_views = get_smartviews(to_api) 769 | to_smart_view_names = [x['name'] for x in to_smart_views] 770 | from_smart_views = [x for x in from_smart_views if x['name'] not in to_smart_view_names] 771 | 772 | # Used to map old to new IDs (custom fields, custom activity types, lead & opportunity statuses, email templates...)
773 | # that will be used in global search & replace within each Smart View query 774 | map_from_to_id = get_id_mappings() 775 | 776 | # Used to map old to new Smart View IDs for Smart Views that use `in:SMART_VIEW_ID` in their queries 777 | map_from_to_smart_view_id = {} 778 | created_smart_views = [] 779 | 780 | # Sort Smart Views as they appear in the original organization 781 | # (when you add a new Smart View, it will show up at the top of the list) 782 | reverse = list(reversed(from_smart_views)) 783 | 784 | 785 | def get_memberships(api, organization): 786 | resp = api.get(f"organization/{organization['id']}", params={"_fields": "memberships,inactive_memberships"}) 787 | return resp["memberships"] + resp["inactive_memberships"] 788 | 789 | from_memberships = get_memberships(from_api, from_organization) 790 | to_memberships = get_memberships(to_api, to_organization) 791 | from_to_membership_id = {} 792 | for from_membership in from_memberships: 793 | to_membership = next((x for x in to_memberships if x['user_email'] == from_membership['user_email']), None) 794 | if to_membership: 795 | from_to_membership_id[from_membership['id']] = to_membership['id'] 796 | 797 | to_user_membership_id = to_api.get("me")["memberships"][0]["id"] 798 | 799 | # Create Smart Views in the destination organization 800 | for smart_view in reverse: 801 | # Adjust sharing IDs 802 | if smart_view["is_shared"]: 803 | # Transfer as-is 804 | pass 805 | else: 806 | # Replace owner membership ID from old to new org 807 | new_shared_with = [] 808 | for old_membership_id in smart_view["shared_with"]: 809 | new_membership_id = from_to_membership_id.get(old_membership_id) 810 | if new_membership_id: 811 | new_shared_with.append(new_membership_id) 812 | 813 | if not new_shared_with: 814 | # No new matching users, default to the user running the transfer 815 | new_shared_with.append(to_user_membership_id) 816 | 817 | smart_view['shared_with'] = new_shared_with 818 | 819 | # Replace IDs 820 | s_query = smart_view.get('s_query') 821 | query = smart_view.get('query') 822 | 823 | if s_query: 824 | smart_view['s_query'] = structured_replace(s_query, map_from_to_id) 825 | elif query: 826 | smart_view['query'] = textual_replace(query, map_from_to_id) 827 | 828 | # If user specifically selected some columns, replace IDs within those as well (in case they are custom fields) 829 | smart_view['selected_fields'] = structured_replace(smart_view['selected_fields'], map_from_to_id) 830 | 831 | try: 832 | old_smart_view_id = smart_view.pop('id') 833 | del smart_view["organization_id"] 834 | del smart_view["user_id"] 835 | 836 | new_smart_view = to_api.post("saved_search", data=smart_view) 837 | map_from_to_smart_view_id[old_smart_view_id] = new_smart_view['id'] 838 | 839 | created_smart_views.append(new_smart_view) 840 | print(f'Added `{smart_view["name"]}`') 841 | except APIError as e: 842 | print(f"Couldn't add `{smart_view['name']}` because {str(e)}") 843 | 844 | # Replace any Smart View IDs in case one Smart View is nested within the other 845 | for smart_view in created_smart_views: 846 | # Replace Smart View IDs 847 | s_query = smart_view.get('s_query') 848 | query = smart_view.get('query') 849 | 850 | if s_query: 851 | smart_view['s_query'] = structured_replace(s_query, map_from_to_smart_view_id) 852 | elif query: 853 | smart_view['query'] = textual_replace(query, map_from_to_smart_view_id) 854 | 855 | # Update the Smart View if necessary 856 | if smart_view['s_query'] != s_query or smart_view['query'] != query: 857 | 
to_api.put(f"saved_search/{smart_view['id']}", data=smart_view) 858 | 859 | if args.webhooks: 860 | print("\nCopying Webhooks") 861 | webhooks = from_api.get_all_items('webhook') 862 | for webhook in webhooks: 863 | del webhook["id"] 864 | 865 | try: 866 | to_api.post("webhook", data=webhook) 867 | print(f'Added `{webhook["url"]}`') 868 | except APIError as e: 869 | print(f"Couldn't add `{webhook['url']}` because {str(e)}") 870 | -------------------------------------------------------------------------------- /scripts/csv_to_cio.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function 4 | 5 | import argparse 6 | import csv 7 | import json 8 | import re 9 | import sys 10 | import time 11 | 12 | import closeio_api 13 | import unidecode 14 | from closeio_api import Client as CloseIO_API 15 | from closeio_api.utils import count_lines, title_case, uncamel 16 | from progressbar import ProgressBar 17 | from progressbar.widgets import ETA, Bar, FileTransferSpeed, Percentage 18 | from requests.exceptions import ConnectionError 19 | 20 | parser = argparse.ArgumentParser(description='Import leads from CSV file') 21 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 22 | parser.add_argument( 23 | '--skip_duplicates', 24 | action='store_true', 25 | help='Skip leads that are already present in Close (determined by company name).', 26 | ) 27 | parser.add_argument( 28 | '--no_grouping', 29 | action='store_true', 30 | help='Turn off the default group-by-company behavior.', 31 | ) 32 | parser.add_argument('file', help='Path to the csv file') 33 | args = parser.parse_args() 34 | 35 | reader = csv.DictReader(open(args.file)) 36 | headers = reader.fieldnames 37 | 38 | import_count = count_lines(args.file) # may have no trailing newline 39 | 40 | cnt = success_cnt = 0 41 | 42 | 43 | def warning(*objs): 44 | print("WARNING: ", *objs, file=sys.stderr) 45 | 46 | 47 | def slugify(str, separator='_'): 48 | str = unidecode.unidecode(str).lower().strip() 49 | return re.sub(r'\W+', separator, str).strip(separator) 50 | 51 | 52 | # Look for headers/columns that match these, case-insensitive. All other headers will be treated as custom fields. 53 | expected_headers = ( 54 | 'company', # multiple contacts will be grouped if company names match 55 | 'url', 56 | 'status', 57 | 'contact', # name of contact 58 | 'title', 59 | 'email', 60 | 'phone', # recommended to start with "+" followed by country code (e.g., +1 650 555 1234) 61 | 'mobile_phone', 62 | 'fax', 63 | 'address', 64 | 'address_1', # if address is missing, address_1 and address_2 will be combined to create it. 65 | 'address_2', # if address is missing, address_1 and address_2 will be combined to create it. 
66 | 'city', 67 | 'state', 68 | 'zip', 69 | 'country', 70 | ) 71 | 72 | # Remove trailing empty column headers 73 | while not len(headers[-1].strip()): 74 | del headers[-1] 75 | 76 | # Check for duplicated column names 77 | if len(set(headers)) != len(headers): 78 | raise Exception('Cannot have duplicate column header names') 79 | 80 | # Check for duplicates after normalization 81 | normalized_headers = [slugify(col) for col in headers] 82 | if len(set(normalized_headers)) != len(normalized_headers): 83 | raise Exception( 84 | 'After column header names were normalized there were duplicate column header names' 85 | ) 86 | 87 | # build a map of header names -> index in actual header row 88 | header_indices = { 89 | col: i for (i, col) in enumerate(normalized_headers) 90 | } # normalized columns as keys 91 | header_indices.update( 92 | {col: i for (i, col) in enumerate(headers)} 93 | ) # add in original column names as keys 94 | expected_headers = [ 95 | col for col in normalized_headers if col in expected_headers 96 | ] 97 | custom_headers = list( 98 | set(normalized_headers) - set(expected_headers) 99 | ) # non-recognized fields in slug-ed format 100 | 101 | # restore original version (capitalization) to custom fields 102 | custom_headers = [ 103 | headers[header_indices[normalized_col]] 104 | for normalized_col in custom_headers 105 | ] 106 | 107 | print("\nRecognized these column names:") 108 | print(f'> {", ".join(expected_headers)}') 109 | if len(custom_headers): 110 | print( 111 | "\nThe following column names weren't recognized, and will be imported as custom fields:" 112 | ) 113 | print(f'> {", ".join(custom_headers)}') 114 | print('') 115 | 116 | 117 | def lead_from_row(row): 118 | row = { 119 | column_name: column_value.strip() 120 | for column_name, column_value in row.items() 121 | } # strip unnecessary white spaces 122 | 123 | # check if the row isn't empty 124 | has_data = { 125 | column_name: column_value 126 | for column_name, column_value in row.items() 127 | if column_value 128 | } 129 | if not has_data: 130 | return None 131 | 132 | lead = {'name': row['company'], 'contacts': [], 'custom': {}} 133 | 134 | if 'url' in row: 135 | lead['url'] = row['url'] 136 | 137 | if 'status' in row: 138 | lead['status'] = row['status'] 139 | 140 | if lead.get('url') and '://' not in lead['url']: 141 | lead['url'] = 'http://%s' % lead['url'] 142 | 143 | # custom fields 144 | for field in custom_headers: 145 | if field in row: 146 | lead['custom'][field] = row[field] 147 | 148 | # address 149 | address = {} 150 | if 'address' in row: 151 | address['address'] = row['address'] 152 | elif 'address_1' in row or 'address_2' in row: 153 | address['address'] = f'{row["address_1"]} {row["address_2"]}'.strip() 154 | if 'city' in row: 155 | address['city'] = title_case(row['city']) 156 | if 'state' in row: 157 | address['state'] = row['state'] 158 | if 'zip' in row: 159 | address['zipcode'] = row['zip'] 160 | if 'country' in row: 161 | address['country'] = row['country'] 162 | if len(address): 163 | lead['addresses'] = [address] 164 | 165 | # contact 166 | contact = {} 167 | if 'contact' in row: 168 | contact['name'] = uncamel(row['contact']) 169 | if 'title' in row: 170 | contact['title'] = row['title'] 171 | 172 | phones = [] 173 | if 'phone' in row: 174 | phones.append({'phone': row['phone'], 'type': 'office'}) 175 | if 'mobile_phone' in row: 176 | phones.append({'phone': row['mobile_phone'], 'type': 'mobile'}) 177 | if 'fax' in row: 178 | phones.append({'phone': row['fax'], 'type': 'fax'}) 179 | 
if len(phones): 180 | contact['phones'] = phones 181 | 182 | emails = [] 183 | if 'email' in row: 184 | emails.append({'email': row['email'], 'type': 'office'}) 185 | if len(emails): 186 | contact['emails'] = emails 187 | 188 | if len(contact): 189 | lead['contacts'] = [contact] 190 | 191 | return lead 192 | 193 | 194 | # Create leads, grouped by company name 195 | unique_leads = {} 196 | for i, row in enumerate(reader): 197 | lead = lead_from_row(row) 198 | if not lead: 199 | continue 200 | 201 | if args.no_grouping: 202 | grouper = 'row-num-%s' % i 203 | else: 204 | # group by lead Name (company) if possible, otherwise put each row in its own lead 205 | grouper = lead['name'] if lead['name'] else ('row-num-%s' % i) 206 | 207 | if grouper not in unique_leads: 208 | unique_leads[grouper] = lead 209 | elif lead['contacts'] not in unique_leads[grouper]['contacts']: 210 | unique_leads[grouper]['contacts'].extend(lead['contacts']) 211 | 212 | print( 213 | f'Found {len(unique_leads)} leads (grouped by company) from {import_count} contacts.' 214 | ) 215 | 216 | print('\nHere is a sample lead (last row):') 217 | print(json.dumps(unique_leads[grouper], indent=4)) 218 | 219 | print('\nAre you sure you want to continue? (y/n) ') 220 | if input('') != 'y': 221 | sys.exit() 222 | 223 | ############################################################################## 224 | 225 | api = CloseIO_API(args.api_key) 226 | 227 | progress_widgets = [ 228 | 'Importing %d rows: ' % import_count, 229 | Percentage(), 230 | ' ', 231 | Bar(), 232 | ' ', 233 | ETA(), 234 | ' ', 235 | FileTransferSpeed(), 236 | ] 237 | pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start() 238 | 239 | dupes_cnt = 0 240 | 241 | for key, val in unique_leads.items(): 242 | retries = 5 243 | 244 | # check if it's a duplicate 245 | dupe = False 246 | if args.skip_duplicates and val.get('name'): 247 | 248 | # get the org id necessary for search 249 | org_id = api.get('api_key')['data'][0]['organization_id'] 250 | 251 | # get all the search results for given lead name 252 | search_results = [] 253 | filters = { 254 | 'organization_id': org_id, 255 | 'query': 'name:"%s"' % key, 256 | } 257 | has_more = True 258 | skip = 0 259 | while has_more: 260 | filters['_skip'] = skip 261 | resp = api.get('lead', params=filters) 262 | results = resp['data'] 263 | search_results.extend(results) 264 | has_more = resp['has_more'] 265 | skip += len(results) 266 | 267 | for result in search_results: 268 | if result['display_name'] == val['name']: 269 | dupe = True 270 | break 271 | 272 | while retries > 0: 273 | if dupe: 274 | dupes_cnt += 1 275 | warning('Duplicate - not importing: %s' % val['name']) 276 | break 277 | 278 | try: 279 | retries -= 1 280 | api.post('lead', val) 281 | retries = 0 282 | success_cnt += 1 283 | except closeio_api.APIError as err: 284 | warning('An error occurred while saving "%s"' % key) 285 | warning(err) 286 | retries = 0 287 | except ConnectionError as e: 288 | warning('Connection error occurred, retrying... 
(%d/5)' % retries) 289 | if retries == 0: 290 | raise 291 | time.sleep(2) 292 | 293 | cnt += 1 294 | if cnt > import_count: 295 | warning('Warning: count overflow') 296 | cnt = import_count 297 | pbar.update(cnt) 298 | 299 | pbar.finish() 300 | 301 | print(f'Successful responses: {success_cnt} of {len(unique_leads)}') 302 | if args.skip_duplicates: 303 | print(f'Duplicates: {dupes_cnt}') 304 | -------------------------------------------------------------------------------- /scripts/custom_field_change_report.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import sys 4 | 5 | from closeio_api import APIError, Client as CloseIO_API 6 | 7 | parser = argparse.ArgumentParser( 8 | description='Export a list of custom field changes for a specific custom field' 9 | ) 10 | 11 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 12 | parser.add_argument( 13 | '--start-date', 14 | '-s', 15 | help='The start of the date range you want to export call data for in yyyy-mm-dd format.', 16 | ) 17 | parser.add_argument( 18 | '--end-date', 19 | '-e', 20 | help='The end of the date range you want to export call data for in yyyy-mm-dd format.', 21 | ) 22 | parser.add_argument( 23 | '--custom-field', 24 | '-f', 25 | required=True, 26 | help='The lcf id of the custom field you\'re searching for', 27 | ) 28 | parser.add_argument( 29 | '--lead-id', 30 | '-l', 31 | help='Use this field if you want to narrow your search to a specific lead_id', 32 | ) 33 | parser.add_argument( 34 | '--user-id', 35 | '-u', 36 | help='Use this field if you want to narrow your search to changes done by a specific user', 37 | ) 38 | args = parser.parse_args() 39 | 40 | api = CloseIO_API(args.api_key) 41 | org_id = api.get('me')['organizations'][0]['id'] 42 | org = api.get( 43 | 'organization/' + org_id, 44 | params={ 45 | '_fields': 'id,name,memberships,inactive_memberships,lead_custom_fields' 46 | }, 47 | ) 48 | org_name = org['name'].replace('/', "") 49 | org_memberships = org['memberships'] + org['inactive_memberships'] 50 | try: 51 | custom_field_name = [ 52 | i for i in org['lead_custom_fields'] if i['id'] == args.custom_field 53 | ][0]['name'] 54 | except IndexError as e: 55 | print( 56 | f"ERROR: Could not find custom field {args.custom_field} in {org_name}" 57 | ) 58 | sys.exit() 59 | 60 | users = {} 61 | 62 | for member in org_memberships: 63 | users[member['user_id']] = member['user_full_name'] 64 | 65 | params = {'object_type': 'lead', 'action': 'updated'} 66 | 67 | events = [] 68 | 69 | custom_lcf = "custom." 
+ str(args.custom_field) 70 | 71 | if args.start_date: 72 | params['date_updated__gte'] = args.start_date 73 | if args.end_date: 74 | params['date_updated__lte'] = args.end_date 75 | if args.lead_id: 76 | params['lead_id'] = args.lead_id 77 | if args.user_id: 78 | params['user_id'] = args.user_id 79 | 80 | has_more = True 81 | cursor = '' 82 | count = 0 83 | while has_more: 84 | params['_cursor'] = cursor 85 | try: 86 | resp = api.get('event', params=params) 87 | for event in resp['data']: 88 | if ( 89 | custom_lcf in event['changed_fields'] 90 | and event.get('previous_data') 91 | and event.get('data') 92 | ): 93 | events.append( 94 | { 95 | 'Date': event['date_created'], 96 | 'Lead ID': event['lead_id'], 97 | 'Lead Name': event['data']['display_name'], 98 | 'User that Made the Change': users[event['user_id']], 99 | 'Old Value': event['previous_data'].get(custom_lcf), 100 | 'New Value': event['data'].get(custom_lcf), 101 | } 102 | ) 103 | cursor = resp['cursor_next'] 104 | count += len(resp['data']) 105 | print(f"Analyzed Events: {count}") 106 | has_more = bool(resp['cursor_next']) 107 | except APIError as e: 108 | pass 109 | 110 | print(f"Total {custom_field_name} Change Events Found: {len(events)}") 111 | 112 | f = open( 113 | f'{org_name} {custom_field_name} Custom Field Changes.csv', 114 | 'w', 115 | newline='', 116 | encoding='utf-8', 117 | ) 118 | try: 119 | ordered_keys = [ 120 | 'Date', 121 | 'Lead ID', 122 | 'Lead Name', 123 | 'User that Made the Change', 124 | 'Old Value', 125 | 'New Value', 126 | ] 127 | writer = csv.DictWriter(f, ordered_keys) 128 | writer.writeheader() 129 | writer.writerows(events) 130 | finally: 131 | f.close() 132 | -------------------------------------------------------------------------------- /scripts/delete_emails_from_contacts.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import csv 5 | import sys 6 | 7 | from closeio_api import APIError, Client as CloseIO_API 8 | 9 | parser = argparse.ArgumentParser( 10 | description='Remove email addresses from contacts in CSV file' 11 | ) 12 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 13 | parser.add_argument( 14 | '--confirmed', 15 | action='store_true', 16 | help='Confirm making changes. Otherwise this script is not going to modify any data.', 17 | ) 18 | parser.add_argument( 19 | '--verbose', '-v', action='store_true', help='Increase logging verbosity.' 20 | ) 21 | parser.add_argument('file', help='Path to the csv file') 22 | args = parser.parse_args() 23 | 24 | reader = csv.DictReader(open(args.file)) 25 | if any( 26 | field not in reader.fieldnames for field in ['contact_id', 'email_address'] 27 | ): 28 | print( 29 | 'contact_id or email_address headers could not be found in your csv file.' 
30 | ) 31 | sys.exit(-1) 32 | 33 | api = CloseIO_API(args.api_key) 34 | 35 | for row in reader: 36 | contact_id = row['contact_id'] 37 | email_address = row['email_address'] 38 | 39 | if args.verbose: 40 | print(f'Attempting to remove {email_address} from {contact_id}') 41 | 42 | try: 43 | contact = api.get('contact/' + contact_id) 44 | 45 | if not contact['emails']: 46 | if args.verbose: 47 | print( 48 | f'Skipping {contact_id} because it has no email addresses' 49 | ) 50 | continue 51 | 52 | emails = list( 53 | filter( 54 | lambda email: email['email'] != email_address, 55 | contact['emails'], 56 | ) 57 | ) 58 | if args.confirmed: 59 | resp = api.put('contact/' + contact_id, {'emails': emails}) 60 | if args.verbose: 61 | print(f'Removed {email_address} from {contact_id}') 62 | except APIError as e: 63 | if args.verbose: 64 | print( 65 | f'Encountered an API error ({e.response.status_code}): {e.response.text}' 66 | ) 67 | -------------------------------------------------------------------------------- /scripts/delete_secondary_addresses.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import argparse 5 | import logging 6 | import time 7 | 8 | from closeio_api import Client as CloseIO_API 9 | 10 | 11 | def run(api_key, confirmed, limit=100): 12 | api = CloseIO_API(api_key) 13 | 14 | # loop through existing leads with multiple addresses 15 | 16 | LEADS_QUERY_WITH_MULTIPLE_ADDRESSES = "addresses > 1 sort:activities" 17 | has_more = True 18 | 19 | while has_more: 20 | resp = api.get( 21 | 'lead', 22 | params={ 23 | 'query': LEADS_QUERY_WITH_MULTIPLE_ADDRESSES, 24 | '_fields': 'id,addresses', 25 | '_limit': limit, 26 | }, 27 | ) 28 | 29 | leads = resp['data'] 30 | 31 | for lead in leads: 32 | if len(lead['addresses']) < 2: 33 | logging.warning("unexpected result: %s", lead) 34 | continue # this shouldn't happen based on the search query, but just to be safe... 35 | if confirmed: 36 | api.put( 37 | 'lead/' + lead['id'], 38 | data={'addresses': lead['addresses'][:1]}, 39 | ) 40 | logging.info( 41 | "removed %d extra address(es) for %s\n%s" 42 | % ( 43 | len(lead['addresses'][1:]), 44 | lead['id'], 45 | lead['addresses'][1:], 46 | ) 47 | ) 48 | 49 | has_more = resp['has_more'] 50 | 51 | time.sleep( 52 | 2 53 | ) # give the search indexer some time to catch up with the changes 54 | 55 | 56 | if __name__ == '__main__': 57 | parser = argparse.ArgumentParser( 58 | description='Delete all but first address for leads with multiple addresses.' 
59 | ) 60 | parser.add_argument('--api-key', '-k', required=True, help='') 61 | parser.add_argument( 62 | '--confirmed', 63 | '-c', 64 | action='store_true', 65 | help='Without this flag, the script will do a dry run without actually updating any data.', 66 | ) 67 | args = parser.parse_args() 68 | 69 | log_format = "[%(asctime)s] %(levelname)s %(message)s" 70 | if not args.confirmed: 71 | log_format = 'DRY RUN: ' + log_format 72 | logging.basicConfig(level=logging.INFO, format=log_format) 73 | logging.debug('parameters: %s' % vars(args)) 74 | 75 | run(api_key=args.api_key, confirmed=args.confirmed) 76 | -------------------------------------------------------------------------------- /scripts/delete_tasks_for_inactive_users.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import sys 5 | 6 | from closeio_api import Client as CloseIO_API 7 | 8 | parser = argparse.ArgumentParser( 9 | description='Remove tasks associated with inactive users' 10 | ) 11 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 12 | parser.add_argument( 13 | '--confirmed', 14 | action='store_true', 15 | help='Confirm making changes. Otherwise this script is not going to modify any data.', 16 | ) 17 | parser.add_argument( 18 | '--verbose', '-v', action='store_true', help='Increase logging verbosity.' 19 | ) 20 | args = parser.parse_args() 21 | 22 | api = CloseIO_API(args.api_key) 23 | 24 | # Get IDs of all inactive users in a given org 25 | org_id = api.get('me')['organizations'][0]['id'] 26 | 27 | org = api.get(f'organization/{org_id}') 28 | inactive_users = [m['user_id'] for m in org['inactive_memberships']] 29 | 30 | # Get IDs of all the tasks assigned to these inactive users 31 | task_ids = [] 32 | total_cnt = len(inactive_users) 33 | for idx, user_id in enumerate(inactive_users): 34 | if args.verbose: 35 | print(f'Gathering tasks for {user_id} ({(idx + 1)}/{total_cnt})') 36 | 37 | has_more = True 38 | skip = 0 39 | limit = 100 40 | while has_more: 41 | resp = api.get( 42 | 'task', 43 | params={ 44 | 'assigned_to': user_id, 45 | '_skip': skip, 46 | '_limit': limit, 47 | '_fields': 'id', 48 | }, 49 | ) 50 | task_ids.extend(t['id'] for t in resp['data']) 51 | has_more = resp['has_more'] 52 | skip += limit 53 | 54 | if args.verbose: 55 | print(f'Found {len(task_ids)} tasks') 56 | 57 | if not args.confirmed: 58 | print( 59 | 'This is a dry run, so the tasks are not deleted. Use the --confirmed flag to delete them.' 60 | ) 61 | sys.exit(0) 62 | 63 | total_cnt = len(task_ids) 64 | for idx, task_id in enumerate(task_ids): 65 | api.delete('task/' + task_id) 66 | if args.verbose: 67 | print(f'Deleting {(idx + 1)}/{total_cnt}') 68 | -------------------------------------------------------------------------------- /scripts/events_by_request_id.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import json 5 | import logging 6 | import sys 7 | 8 | from closeio_api import Client as CloseIO_API 9 | 10 | parser = argparse.ArgumentParser(description='Get Events By Request ID') 11 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 12 | parser.add_argument( 13 | '--request-id', '-r', required=True, help='request_id from event log.' 
14 | ) 15 | parser.add_argument( 16 | '--output', '-o', required=True, help='json output file of events' 17 | ) 18 | parser.add_argument( 19 | '--verbose', '-v', action='store_true', help='Increase logging verbosity.' 20 | ) 21 | args = parser.parse_args() 22 | 23 | api = CloseIO_API(args.api_key) 24 | 25 | 26 | def setup_logger(): 27 | logger = logging.getLogger('closeio.api.events_by_request_id') 28 | logger.setLevel(logging.INFO) 29 | if args.verbose: 30 | logger.setLevel(logging.DEBUG) 31 | 32 | ch = logging.StreamHandler(sys.stdout) 33 | formatter = logging.Formatter( 34 | '%(asctime)s - %(name)s - %(levelname)s - %(message)s' 35 | ) 36 | ch.setFormatter(formatter) 37 | logger.addHandler(ch) 38 | return logger 39 | 40 | 41 | logger = setup_logger() 42 | 43 | output = open(args.output, "w") 44 | output.write('{"events": [') 45 | 46 | has_more = True 47 | cursor = None 48 | first_iter = True 49 | while has_more: 50 | resp = api.get( 51 | 'event', params={'_cursor': cursor, 'request_id': args.request_id} 52 | ) 53 | cursor = resp['cursor_next'] 54 | has_more = bool(cursor) 55 | 56 | for event in resp['data']: 57 | if not first_iter: 58 | output.write(",") 59 | json.dump(event, output, indent=4) 60 | first_iter = False 61 | 62 | output.write("]}") 63 | output.close() 64 | -------------------------------------------------------------------------------- /scripts/export_activities_to_json.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | from datetime import datetime 4 | from operator import itemgetter 5 | 6 | import gevent.monkey 7 | from closeio_api import Client as CloseIO_API 8 | from dateutil.relativedelta import relativedelta 9 | from gevent.pool import Pool 10 | 11 | gevent.monkey.patch_all() 12 | 13 | parser = argparse.ArgumentParser( 14 | description='Export Close activity data within a date range into a JSON file' 15 | ) 16 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 17 | parser.add_argument( 18 | '--date-start', 19 | '-s', 20 | required=True, 21 | help='The yyyy-mm-dd you want to start looking for activities', 22 | ) 23 | parser.add_argument( 24 | '--date-end', 25 | '-e', 26 | required=True, 27 | help='The yyyy-mm-dd you want to end looking for activities', 28 | ) 29 | parser.add_argument( 30 | '--activity-type', 31 | '-t', 32 | choices=[ 33 | 'call', 34 | 'created', 35 | 'email', 36 | 'lead_status_change', 37 | 'note', 38 | 'opportunity_status_change', 39 | 'sms', 40 | 'task_completed', 41 | ], 42 | required=True, 43 | help='The type of activity you\'d like to export to JSON', 44 | ) 45 | args = parser.parse_args() 46 | 47 | api = CloseIO_API(args.api_key) 48 | 49 | days = [] 50 | activities = [] 51 | 52 | endpoint = args.activity_type 53 | if endpoint == 'opportunity_status_change': 54 | endpoint = 'status_change/opportunity' 55 | elif endpoint == 'lead_status_change': 56 | endpoint = 'status_change/lead' 57 | 58 | starting_date = datetime.strptime(args.date_start, '%Y-%m-%d') 59 | ending_date = ( 60 | starting_date + relativedelta(days=+1) - relativedelta(seconds=+1) 61 | ) 62 | ending_date_final = datetime.strptime(args.date_end, '%Y-%m-%d') 63 | 64 | # Generate a list of days to cycle through in the date range 65 | while starting_date < ending_date_final: 66 | starting_date_string = datetime.strftime( 67 | starting_date, "%Y-%m-%dT%H:%M:%S" 68 | ) 69 | ending_date_string = datetime.strftime(ending_date, "%Y-%m-%dT%H:%M:%S") 70 | days.append( 71 | { 72 | 'day': 
starting_date.strftime('%Y-%m-%d'), 73 | 'start_date': starting_date_string, 74 | 'end_date': ending_date_string, 75 | } 76 | ) 77 | starting_date = starting_date + relativedelta(days=+1) 78 | ending_date = ( 79 | starting_date + relativedelta(days=+1) - relativedelta(seconds=+1) 80 | ) 81 | 82 | 83 | # Method to get all of the specified activities for a specific day. 84 | def getActivities(day): 85 | print(f"Getting all {args.activity_type} activites for {day['day']}...") 86 | has_more = True 87 | offset = 0 88 | while has_more: 89 | resp = api.get( 90 | 'activity/' + endpoint, 91 | params={ 92 | '_skip': offset, 93 | 'date_created__gte': day['start_date'], 94 | 'date_created__lte': day['end_date'], 95 | }, 96 | ) 97 | for activity in resp['data']: 98 | activities.append(activity) 99 | offset += len(resp['data']) 100 | has_more = resp['has_more'] 101 | 102 | 103 | pool = Pool(5) 104 | pool.map(getActivities, days) 105 | 106 | # Sort all activities by date_created to be in order because they were pulled in parallel 107 | activities = sorted(activities, key=itemgetter('date_created'), reverse=True) 108 | 109 | org_name = api.get('me')['organizations'][0]['name'].replace('/', '') 110 | with open( 111 | '%s - %s activity export between %s and %s.json' 112 | % (org_name, args.activity_type, args.date_start, args.date_end), 113 | 'w', 114 | ) as outfile: 115 | json.dump(activities, outfile, indent=4) 116 | -------------------------------------------------------------------------------- /scripts/export_calls.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import math 4 | 5 | import gevent.monkey 6 | 7 | gevent.monkey.patch_all() 8 | from gevent.pool import Pool 9 | 10 | pool = Pool(7) 11 | 12 | from closeio_api import Client as CloseApi 13 | 14 | parser = argparse.ArgumentParser( 15 | description='Download a CSV of calls from/to a specific Close number over a specified time range' 16 | ) 17 | 18 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 19 | parser.add_argument( 20 | '--direction', 21 | '-d', 22 | default=None, 23 | choices=['inbound', 'outbound'], 24 | help='Use this field to only export inbound calls or outbound calls. Leave this field blank to export both.', 25 | ) 26 | parser.add_argument( 27 | '--missed-or-voicemail', 28 | '-m', 29 | action='store_true', 30 | help='Use this field to only export missed calls, voicemails, or calls of a duration 0', 31 | ) 32 | parser.add_argument( 33 | '--end-date', 34 | '-e', 35 | help='The end of the date range you want to export call data for in yyyy-mm-dd format.', 36 | ) 37 | parser.add_argument( 38 | '--start-date', 39 | '-s', 40 | help='The start of the date range you want to export call data for in yyyy-mm-dd format.', 41 | ) 42 | parser.add_argument( 43 | '--phone-number', 44 | '-p', 45 | help='The phone number you\'d like to export the calls for in E164 international format. 
Example: +18552567346', 46 | ) 47 | parser.add_argument( 48 | '--user-id', 49 | '-u', 50 | help='Use this field if you only want to find calls for a specific user', 51 | ) 52 | parser.add_argument( 53 | '--call-costs', 54 | '-c', 55 | action='store_true', 56 | help='Use this field if you want to include a call cost column in your export CSV', 57 | ) 58 | parser.add_argument( 59 | '--transcripts', 60 | '-t', 61 | action='store_true', 62 | help='Use this field if you want to include a call transcript column in your export CSV', 63 | ) 64 | 65 | args = parser.parse_args() 66 | 67 | api = CloseApi(args.api_key) 68 | 69 | params = {} 70 | 71 | if not args.start_date and not args.end_date: 72 | lead_query = 'has:calls' 73 | else: 74 | lead_query = 'call(' 75 | 76 | if args.start_date: 77 | params['date_created__gte'] = args.start_date 78 | lead_query += f' date >= "{args.start_date}"' 79 | 80 | if args.end_date: 81 | params['date_created__lt'] = args.end_date 82 | lead_query += f' date < "{args.end_date}"' 83 | 84 | lead_query += ")" 85 | 86 | if args.user_id: 87 | params['user_id'] = args.user_id 88 | 89 | print("Getting Leads...") 90 | print(f'\t{lead_query}') 91 | 92 | def get_all(url, params=None): 93 | if params is None: 94 | params = {} 95 | 96 | items = [] 97 | has_more = True 98 | offset = 0 99 | while has_more: 100 | params["_skip"] = offset 101 | resp = api.get(url, params=params) 102 | items.extend(resp['data']) 103 | offset += len(resp["data"]) 104 | has_more = resp["has_more"] 105 | return items 106 | 107 | def get_all_leads_with_slices(params, slice_size=500): 108 | leads = [] 109 | 110 | total_leads = api.get("lead", params={"_limit": 0, "query": params["query"], "_fields": "id"})[ 111 | "total_results"] 112 | total_slices = int(math.ceil(float(total_leads) / slice_size)) 113 | 114 | slices = [] 115 | for slice_number in range(1, total_slices + 1): 116 | slices.append({"total_slices": total_slices, "slice": slice_number, "params": params}) 117 | 118 | def _get_all_leads_slice(slice_obj): 119 | params = slice_obj["params"] 120 | 121 | new_params = params.copy() 122 | new_params["query"] = f'({params["query"]}) slice:{slice_obj["slice"]}/{slice_obj["total_slices"]}' 123 | 124 | leads.extend(get_all("lead", params=new_params)) 125 | 126 | pool.map(_get_all_leads_slice, slices) 127 | 128 | return leads 129 | 130 | 131 | leads = get_all_leads_with_slices(params={"query": lead_query, "_fields": "id,contacts,display_name"}) 132 | 133 | lead_id_to_name = {} 134 | contacts_id_to_name = {} 135 | for lead in leads: 136 | lead_id_to_name[lead["id"]] = lead["display_name"] 137 | for contact in lead["contacts"]: 138 | contacts_id_to_name[contact["id"]] = contact['name'] 139 | 140 | call_fields = [ 141 | 'id', 'user_id', 'duration', 'disposition', 'status', 'direction', 'date_created', 'remote_phone', 'local_phone', 142 | 'voicemail_url', 'recording_url', 'source', 'lead_id', 'updated_by_name', 'contact_id', 143 | ] 144 | 145 | if args.call_costs: 146 | call_fields += ['cost'] 147 | 148 | if args.transcripts: 149 | call_fields += ['recording_transcript'] 150 | 151 | params['_fields'] = ','.join(call_fields) 152 | 153 | print("Getting Calls...") 154 | calls = get_all("activity/call", params=params) 155 | 156 | # Add lead names and formatted costs 157 | for call in calls: 158 | call['lead_name'] = lead_id_to_name.get(call.get('lead_id'), '') 159 | call['contact_name'] = contacts_id_to_name.get(call.get('contact_id'), '') 160 | 161 | if call.get('cost'): 162 | call['formatted_cost'] = 
f"${(float(call['cost']) / 100)}" 163 | if call.get('recording_transcript'): 164 | call['recording_transcript'] = call.get('recording_transcript').get('summary_text') 165 | 166 | # Filter calls 167 | if args.missed_or_voicemail: 168 | calls = [i for i in calls if i['duration'] == 0] 169 | 170 | if args.direction: 171 | calls = [i for i in calls if i['direction'] == args.direction] 172 | 173 | if args.phone_number: 174 | calls = [i for i in calls if i['local_phone'] == args.phone_number] 175 | 176 | # Write to CSV 177 | organization = api.get('me')['organizations'][0] 178 | organization_name = organization['name'].replace('/', "") 179 | file_name = f'{organization_name} Calls.csv' 180 | 181 | with open(file_name, 'w', newline='', encoding='utf-8') as f: 182 | keys = call_fields + ['lead_name', 'contact_name'] 183 | if args.call_costs: 184 | keys += ['formatted_cost'] 185 | writer = csv.DictWriter(f, keys) 186 | writer.writeheader() 187 | writer.writerows(calls) 188 | 189 | print(f'Done! Report is saved to `{file_name}`') -------------------------------------------------------------------------------- /scripts/export_sequence_subscriptions_public.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import math 4 | 5 | import gevent.monkey 6 | 7 | gevent.monkey.patch_all() 8 | 9 | from closeio_api import Client as CloseApi 10 | from gevent.pool import Pool 11 | 12 | pool = Pool(10) 13 | 14 | arg_parser = argparse.ArgumentParser(description="Download a CSV of email sequence subscriptions") 15 | arg_parser.add_argument("--api-key", "-k", required=True, help="API Key") 16 | arg_parser.add_argument("--sequence-id", help="Fetch only subscriptions from this Sequence ID") 17 | args = arg_parser.parse_args() 18 | 19 | api = CloseApi(args.api_key) 20 | 21 | csv_data = [] 22 | 23 | 24 | def get_sequences(): 25 | sequences = [] 26 | 27 | has_more = True 28 | offset = 0 29 | while has_more: 30 | resp = api.get('sequence', params={'_skip': offset})  # pass the computed offset so pagination advances past the first page 31 | sequences.extend(resp['data']) 32 | offset += len(resp['data']) 33 | has_more = resp['has_more'] 34 | 35 | return sequences 36 | 37 | 38 | sequences = get_sequences() 39 | 40 | query = "contact(sequence_subscription(sequence:*)) " 41 | 42 | # Get the total number of slices 43 | total_leads = api.get('lead', params={'_limit': 0, 'query': query})['total_results'] 44 | total_slices = int(math.ceil(float(total_leads) / 1000)) 45 | slices = range(1, total_slices + 1) 46 | 47 | 48 | def get_leads_slice(slice_index): 49 | print(f"Getting lead slice {slice_index} of {total_slices}...") 50 | has_more = True 51 | offset = 0 52 | while has_more: 53 | resp = api.get( 54 | 'lead', 55 | params={ 56 | '_skip': offset, 57 | 'query': f'sort:created slice:{slice_index}/{total_slices}', 58 | '_fields': 'id' 59 | }, 60 | ) 61 | leads.extend(resp['data']) 62 | 63 | offset += len(resp['data']) 64 | has_more = resp['has_more'] 65 | 66 | 67 | leads = [] 68 | pool.map(get_leads_slice, slices) 69 | 70 | 71 | def fetch_sequence_subscriptions(lead): 72 | params = {"lead_id": lead["id"]} 73 | 74 | if args.sequence_id: 75 | params["sequence_id"] = args.sequence_id 76 | 77 | def get_sequence_subscriptions(params): 78 | subscriptions = [] 79 | 80 | has_more = True 81 | offset = 0 82 | while has_more: 83 | resp = api.get('sequence_subscription', params={**params, '_skip': offset})  # include _skip so every page of subscriptions is fetched 84 | subscriptions.extend(resp['data']) 85 | offset += len(resp['data']) 86 | has_more = resp['has_more'] 87 | 88 | return subscriptions 89 | 90 |
all_subs.extend(get_sequence_subscriptions(params)) 91 | 92 | 93 | all_subs = [] 94 | pool.map(fetch_sequence_subscriptions, leads) 95 | 96 | sequence_names = dict(zip([x["id"] for x in sequences], [x["name"] for x in sequences])) 97 | for subscription in all_subs: 98 | csv_data.append( 99 | { 100 | "id": subscription["id"], 101 | "sequence_id": subscription["sequence_id"], 102 | "sequence_name": sequence_names.get(subscription["sequence_id"]), 103 | "contact_id": subscription["contact_id"], 104 | "contact_email": subscription["contact_email"], 105 | "sender_account_id": subscription["sender_account_id"], 106 | "sender_email": subscription["sender_email"], 107 | "sender_name": subscription["sender_name"], 108 | "status": subscription["status"], 109 | "pause_reason": subscription["pause_reason"], 110 | } 111 | ) 112 | 113 | keys = [ 114 | "id", 115 | "sequence_id", 116 | "sequence_name", 117 | "contact_id", 118 | "contact_email", 119 | "sender_account_id", 120 | "sender_email", 121 | "sender_name", 122 | "status", 123 | "pause_reason", 124 | ] 125 | 126 | org_name = api.get("me")["organizations"][0]['name'] 127 | with open(f"{org_name} - Sequence subscriptions.csv", "wt") as f: 128 | writer = csv.DictWriter(f, keys) 129 | writer.writeheader() 130 | writer.writerows(csv_data) 131 | -------------------------------------------------------------------------------- /scripts/export_sequences_data.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | 4 | import gevent.monkey 5 | 6 | gevent.monkey.patch_all() 7 | from closeio_api import Client as CloseIO_API 8 | from gevent.pool import Pool 9 | 10 | parser = argparse.ArgumentParser( 11 | description='Download a CSV of email sequences and their subscription counts (number of active/paused/finished subscriptions)' 12 | ) 13 | 14 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 15 | args = parser.parse_args() 16 | 17 | api = CloseIO_API(args.api_key) 18 | org_name = api.get('me')['organizations'][0]['name'] 19 | 20 | print('Getting email sequences...') 21 | 22 | params = {'_fields': 'id'} 23 | has_more = True 24 | offset = 0 25 | sequence_ids = [] 26 | while has_more: 27 | params['_skip'] = offset 28 | resp = api.get('sequence', params=params) 29 | for sequence in resp['data']: 30 | sequence_ids.append(sequence['id']) 31 | offset += len(resp['data']) 32 | has_more = resp['has_more'] 33 | 34 | print(f'Found {len(sequence_ids)} email sequences. 
Getting their details...') 35 | 36 | 37 | def fetch_sequence(sequence_id): 38 | resp = api.get(f'sequence/{sequence_id}') 39 | 40 | counts_by_status = resp['subscription_counts_by_status'] 41 | active_subscriptions = counts_by_status['active'] 42 | paused_subscriptions = counts_by_status['paused'] 43 | finished_subscriptions = counts_by_status['finished'] 44 | total_subscriptions = ( 45 | active_subscriptions + paused_subscriptions + finished_subscriptions 46 | ) 47 | 48 | sequences.append( 49 | { 50 | 'id': resp['id'], 51 | 'name': resp['name'], 52 | 'is_active': resp['status'] == 'active', 53 | 'total_subscriptions': total_subscriptions, 54 | 'active_subscriptions': active_subscriptions, 55 | 'paused_subscriptions': paused_subscriptions, 56 | 'finished_subscriptions': finished_subscriptions, 57 | } 58 | ) 59 | 60 | 61 | sequences = [] 62 | 63 | pool = Pool(5) 64 | pool.map(fetch_sequence, sequence_ids) 65 | 66 | file_name = f'{org_name.replace("/", " ")} Email Sequences.csv' 67 | print(f'Exporting to `{file_name}`') 68 | 69 | f = open(file_name, 'w', newline='', encoding='utf-8') 70 | try: 71 | keys = [ 72 | 'id', 73 | 'name', 74 | 'is_active', 75 | 'total_subscriptions', 76 | 'active_subscriptions', 77 | 'paused_subscriptions', 78 | 'finished_subscriptions', 79 | ] 80 | writer = csv.DictWriter(f, keys) 81 | writer.writeheader() 82 | writer.writerows(sequences) 83 | finally: 84 | f.close() 85 | -------------------------------------------------------------------------------- /scripts/export_sms.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import math 4 | 5 | import gevent.monkey 6 | 7 | gevent.monkey.patch_all() 8 | from gevent.pool import Pool 9 | 10 | pool = Pool(7) 11 | 12 | from closeio_api import Client as CloseApi 13 | 14 | arg_parser = argparse.ArgumentParser(description="Download a CSV of SMS messages over a specified time range") 15 | arg_parser.add_argument("--api-key", "-k", required=True, help="API Key") 16 | arg_parser.add_argument( 17 | "--start-date", 18 | "-s", 19 | help="The start of the date range you want to export SMS data for in yyyy-mm-dd format (inclusive).", 20 | ) 21 | arg_parser.add_argument( 22 | "--end-date", 23 | "-e", 24 | help="The end of the date range you want to export SMS data for in yyyy-mm-dd format (exclusive).", 25 | ) 26 | arg_parser.add_argument( 27 | "--user", 28 | "-u", 29 | help="Use this field if you only want to find SMS for a specific users - enter email, ID, or name", 30 | ) 31 | arg_parser.add_argument( 32 | "--direction", 33 | "-d", 34 | default=None, 35 | choices=["inbound", "outbound"], 36 | help="Use this field to only export inbound SMS or outbound SMS. 
Leave this field blank to export both.", 37 | ) 38 | arg_parser.add_argument( 39 | "--status", 40 | default=None, 41 | choices=["error", "inbox", "draft", "scheduled", "outbox", "sent"], 42 | help="Use this field to only export SMS in specific status.", 43 | ) 44 | arg_parser.add_argument("--smart-view", help="Export SMS messages only for leads in a specific Smart View") 45 | args = arg_parser.parse_args() 46 | 47 | api = CloseApi(args.api_key) 48 | 49 | organization = api.get("me")["organizations"][0] 50 | 51 | sms_messages_fields = ['id', 'direction', 'local_phone', 'remote_phone', 'lead_id', 'contact_id', 'user_id', 52 | 'user_name', 'date_created', 'text', 'status', 'cost', 'source'] 53 | sms_messages_params = { 54 | "_fields": ','.join(sms_messages_fields) 55 | } 56 | 57 | if args.user: 58 | def get_membership(user_identifier): 59 | resp = api.get(f"organization/{organization['id']}", params={"_fields": "memberships,inactive_memberships"}) 60 | memberships = resp["memberships"] + resp["inactive_memberships"] 61 | 62 | if user_identifier.startswith("user_"): 63 | return next(iter(x for x in memberships if x["user_id"] == user_identifier), None) 64 | elif "@" in user_identifier: 65 | return next(iter(x for x in memberships if x["user_email"] == user_identifier), None) 66 | else: 67 | return next( 68 | iter(x for x in memberships if x["user_full_name"] == user_identifier), 69 | None, 70 | ) 71 | 72 | 73 | user = get_membership(args.user) 74 | if not user: 75 | print(f"Couldn't find user `{args.user}` in organization `{organization['name']}`") 76 | exit() 77 | 78 | sms_messages_params["user_id"] = user["user_id"] 79 | else: 80 | user = None 81 | 82 | query = "" 83 | 84 | if args.start_date: 85 | query += f' date >= "{args.start_date}"' 86 | 87 | if args.end_date: 88 | query += f' date < "{args.end_date}"' 89 | 90 | if args.status: 91 | query += f" status:{args.status} " 92 | 93 | if args.direction: 94 | query += f" direction:{args.direction} " 95 | 96 | if user: 97 | query += f" user:{user['user_id']} " 98 | 99 | if query: 100 | query = f"sms({query})" 101 | else: 102 | query = "sms_messages > 0" 103 | 104 | if args.smart_view: 105 | query += f' in:"{args.smart_view}"' 106 | 107 | print("Getting Leads...") 108 | print(f'\t{query}') 109 | 110 | def get_all(url, params=None): 111 | if params is None: 112 | params = {} 113 | 114 | items = [] 115 | has_more = True 116 | offset = 0 117 | while has_more: 118 | params["_skip"] = offset 119 | resp = api.get(url, params=params) 120 | items.extend(resp['data']) 121 | offset += len(resp["data"]) 122 | has_more = resp["has_more"] 123 | return items 124 | 125 | 126 | def get_all_leads_with_slices(params, slice_size=500): 127 | leads = [] 128 | 129 | total_leads = api.get("lead", params={"_limit": 0, "query": params["query"], "_fields": "id"})[ 130 | "total_results"] 131 | total_slices = int(math.ceil(float(total_leads) / slice_size)) 132 | 133 | slices = [] 134 | for slice_number in range(1, total_slices + 1): 135 | slices.append({"total_slices": total_slices, "slice": slice_number, "params": params}) 136 | 137 | def _get_all_leads_slice(slice_obj): 138 | params = slice_obj["params"] 139 | 140 | new_params = params.copy() 141 | new_params["query"] = f'({params["query"]}) slice:{slice_obj["slice"]}/{slice_obj["total_slices"]}' 142 | 143 | leads.extend(get_all("lead", params=new_params)) 144 | 145 | pool.map(_get_all_leads_slice, slices) 146 | 147 | return leads 148 | 149 | 150 | leads = get_all_leads_with_slices(params={"query": query, "_fields": 
"id,display_name"}) 151 | 152 | lead_id_to_name = {} 153 | for lead in leads: 154 | lead_id_to_name[lead["id"]] = lead["display_name"] 155 | 156 | print("Getting SMS messages...") 157 | 158 | 159 | def get_sms_messages_for_lead(lead): 160 | sms_params = sms_messages_params.copy() 161 | sms_params["lead_id"] = lead["id"] 162 | 163 | if args.start_date: 164 | sms_params["date_created__gt"] = args.start_date 165 | if args.end_date: 166 | sms_params["date_created__lt"] = args.end_date 167 | 168 | sms_messages.extend(get_all("activity/sms", params=sms_params)) 169 | 170 | 171 | sms_messages = [] 172 | pool.map(get_sms_messages_for_lead, leads) 173 | 174 | # Sort by newest first 175 | sms_messages.sort(key=lambda x: x["date_created"], reverse=True) 176 | 177 | if args.direction: 178 | sms_messages = [i for i in sms_messages if i["direction"] == args.direction] 179 | 180 | if args.status: 181 | sms_messages = [i for i in sms_messages if i["status"] == args.status] 182 | 183 | for sms_message in sms_messages: 184 | sms_message["lead_name"] = lead_id_to_name.get(sms_message.get("lead_id"), "") 185 | 186 | if sms_message.get("cost"): 187 | sms_message["formatted_cost"] = f"${(float(sms_message['cost']) / 100)}" 188 | 189 | # Write to CSV 190 | file_name = f"{organization['name']} SMS messages.csv" 191 | 192 | with open(file_name, 'w', newline='', encoding='utf-8') as f: 193 | writer = csv.DictWriter(f, sms_messages_fields + ['lead_name', 'formatted_cost']) 194 | writer.writeheader() 195 | writer.writerows(sms_messages) 196 | 197 | print(f'Done! Report is saved to `{file_name}`') 198 | -------------------------------------------------------------------------------- /scripts/find_contact_duplicates_on_single_lead.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import math 4 | from operator import itemgetter 5 | 6 | import gevent.monkey 7 | from closeio_api import Client as CloseIO_API 8 | from gevent.pool import Pool 9 | 10 | gevent.monkey.patch_all() 11 | 12 | pool = Pool(7) 13 | 14 | parser = argparse.ArgumentParser( 15 | description='Find duplicate contacts on a lead in your Close org via contact_name, email address, or phone number' 16 | ) 17 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 18 | parser.add_argument( 19 | '--field', 20 | '-f', 21 | default='all', 22 | choices=['contact_name', 'email', 'phone', 'all'], 23 | required=False, 24 | help="Specify a field to compare uniqueness", 25 | ) 26 | args = parser.parse_args() 27 | 28 | # Initialize Close API Wrapper 29 | api = CloseIO_API(args.api_key) 30 | org_name = api.get('me')['organizations'][0]['name'].replace('/', '') 31 | 32 | # Calculate number of slices necessary to get all leads 33 | total_leads = api.get( 34 | 'lead', params={'_limit': 0, 'query': 'sort:created contacts > 1'} 35 | )['total_results'] 36 | total_slices = int(math.ceil(float(total_leads) / 1000)) 37 | slices = range(1, total_slices + 1) 38 | leads = [] 39 | 40 | 41 | # Write data to a CSV 42 | def writeCSV(type_name, items, ordered_keys): 43 | print(f"Writing {type_name} data to CSV...") 44 | f = open( 45 | f'{org_name} {type_name} Duplicates on a Single Lead.csv', 46 | 'w', 47 | newline='', 48 | encoding='utf-8', 49 | ) 50 | try: 51 | writer = csv.DictWriter(f, ordered_keys) 52 | writer.writeheader() 53 | writer.writerows(items) 54 | finally: 55 | f.close() 56 | 57 | # Get leads for each slice 58 | 59 | 60 | def getLeadsSlice(slice_num): 61 | print(f"Getting lead slice 
{slice_num} of {total_slices}...") 62 | has_more = True 63 | offset = 0 64 | while has_more: 65 | resp = api.get( 66 | 'lead', 67 | params={ 68 | '_skip': offset, 69 | 'query': 'sort:created slice:%s/%s contacts > 1' 70 | % (slice_num, total_slices), 71 | '_fields': 'id,display_name,contacts,date_created', 72 | }, 73 | ) 74 | for lead in resp['data']: 75 | leads.append(lead) 76 | offset += len(resp['data']) 77 | has_more = resp['has_more'] 78 | 79 | 80 | # Add to a list of duplicates for contact names 81 | def getDuplicatesForContactName(contact_name): 82 | for dupe in contact_names[contact_name]: 83 | contact_name_duplicates.append( 84 | { 85 | 'Contact Name': dupe['display_name'], 86 | 'Lead Name': dupe['lead_name'], 87 | 'Contact ID': dupe['id'], 88 | 'Lead ID': dupe['lead_id'], 89 | 'Close URL': 'https://app.close.com/lead/%s/' 90 | % dupe['lead_id'], 91 | } 92 | ) 93 | 94 | 95 | # Add to a list of duplicates for contact emails 96 | def getDuplicatesForEmail(email): 97 | for dupe in emails[email]: 98 | email_duplicates.append( 99 | { 100 | 'Email Address': email, 101 | 'Contact Name': dupe['display_name'], 102 | 'Lead Name': dupe['lead_name'], 103 | 'Contact ID': dupe['id'], 104 | 'Lead ID': dupe['lead_id'], 105 | 'Close URL': 'https://app.close.com/lead/%s/' 106 | % dupe['lead_id'], 107 | } 108 | ) 109 | 110 | 111 | # Add to a list of duplicates for contact phones 112 | def getDuplicatesForPhone(phone): 113 | for dupe in phones[phone]: 114 | phone_duplicates.append( 115 | { 116 | 'Phone Number': phone, 117 | 'Contact Name': dupe['display_name'], 118 | 'Lead Name': dupe['lead_name'], 119 | 'Contact ID': dupe['id'], 120 | 'Lead ID': dupe['lead_id'], 121 | 'Close URL': 'https://app.close.com/lead/%s/' 122 | % dupe['lead_id'], 123 | } 124 | ) 125 | 126 | 127 | print("Getting Leads...") 128 | pool.map(getLeadsSlice, slices) 129 | leads = sorted(leads, key=itemgetter('date_created')) 130 | 131 | # Process duplicates 132 | contact_name_duplicates = [] 133 | email_duplicates = [] 134 | phone_duplicates = [] 135 | print("Processing contacts on each lead...") 136 | 137 | for lead in leads: 138 | contact_names = {} 139 | emails = {} 140 | phones = {} 141 | keys_with_dupes_contact_name = [] 142 | keys_with_dupes_email = [] 143 | keys_with_dupes_phone = [] 144 | for contact in lead['contacts']: 145 | contact['lead_name'] = lead['display_name'] 146 | # Pouplate a dictionary of duplicate contact names, and keep track of those that appear more than once 147 | if args.field in ['all', 'contact_name']: 148 | lower_name = contact['display_name'].strip().lower() 149 | if ( 150 | contact_names.get(lower_name) 151 | and contact not in contact_names[lower_name] 152 | ): 153 | contact_names[lower_name].append(contact) 154 | keys_with_dupes_contact_name.append(lower_name) 155 | elif not contact_names.get(lower_name): 156 | contact_names[lower_name] = [contact] 157 | 158 | # Populate a dictionary of emails, and keep track of those that appear more than once 159 | if args.field in ['all', 'email']: 160 | for email in contact['emails']: 161 | if ( 162 | emails.get(email['email']) 163 | and contact not in emails[email['email']] 164 | ): 165 | emails[email['email']].append(contact) 166 | keys_with_dupes_email.append(email['email']) 167 | elif not emails.get(email['email']): 168 | emails[email['email']] = [contact] 169 | 170 | # Populate a dictionary of phones, and keep track of those that appear more than once 171 | if args.field in ['all', 'phone']: 172 | for phone in contact['phones']: 173 | if ( 174 | 
phones.get(phone['phone']) 175 | and contact not in phones[phone['phone']] 176 | ): 177 | phones[phone['phone']].append(contact) 178 | keys_with_dupes_phone.append(phone['phone']) 179 | elif not phones.get(phone['phone']): 180 | phones[phone['phone']] = [contact] 181 | 182 | # Write data to appropriate arrays 183 | if args.field in ['all', 'contact_name']: 184 | if len(keys_with_dupes_contact_name) > 0: 185 | keys_with_dupes_contact_name = list( 186 | set(keys_with_dupes_contact_name) 187 | ) 188 | pool.map(getDuplicatesForContactName, keys_with_dupes_contact_name) 189 | 190 | if args.field in ['all', 'email']: 191 | if len(keys_with_dupes_email) > 0: 192 | keys_with_dupes_email = list(set(keys_with_dupes_email)) 193 | pool.map(getDuplicatesForEmail, keys_with_dupes_email) 194 | 195 | if args.field in ['all', 'phone']: 196 | if len(keys_with_dupes_phone) > 0: 197 | keys_with_dupes_phone = list(set(keys_with_dupes_phone)) 198 | pool.map(getDuplicatesForPhone, keys_with_dupes_phone) 199 | 200 | print(f"{(leads.index(lead) + 1)} of {len(leads)}: {lead['id']}") 201 | 202 | if args.field in ['all', 'contact_name']: 203 | # Sort the duplicates alphabetically by lead name and then contact name and write them to a CSV 204 | contact_name_duplicates = sorted( 205 | contact_name_duplicates, key=itemgetter('Lead ID', 'Contact Name') 206 | ) 207 | writeCSV( 208 | "Contact Name", 209 | contact_name_duplicates, 210 | ['Contact Name', 'Lead Name', 'Contact ID', 'Lead ID', 'Close URL'], 211 | ) 212 | 213 | if args.field in ['all', 'email']: 214 | email_duplicates = sorted( 215 | email_duplicates, key=itemgetter('Lead ID', 'Email Address') 216 | ) 217 | writeCSV( 218 | "Email", 219 | email_duplicates, 220 | [ 221 | 'Email Address', 222 | 'Contact Name', 223 | 'Lead Name', 224 | 'Contact ID', 225 | 'Lead ID', 226 | 'Close URL', 227 | ], 228 | ) 229 | 230 | if args.field in ['all', 'phone']: 231 | phone_duplicates = sorted( 232 | phone_duplicates, key=itemgetter('Lead ID', 'Phone Number') 233 | ) 234 | writeCSV( 235 | "Phone", 236 | phone_duplicates, 237 | [ 238 | 'Phone Number', 239 | 'Contact Name', 240 | 'Lead Name', 241 | 'Contact ID', 242 | 'Lead ID', 243 | 'Close URL', 244 | ], 245 | ) 246 | -------------------------------------------------------------------------------- /scripts/find_duplicate_leads.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import math 4 | from operator import itemgetter 5 | 6 | import gevent.monkey 7 | 8 | gevent.monkey.patch_all() 9 | from urllib.parse import urlparse 10 | from closeio_api import Client as CloseIO_API 11 | from gevent.pool import Pool 12 | 13 | pool = Pool(7) 14 | 15 | parser = argparse.ArgumentParser( 16 | description='Find duplicate leads in your Close org via lead name, email address, phone number, or lead url hostname' 17 | ) 18 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 19 | parser.add_argument( 20 | '--field', 21 | '-f', 22 | default='all', 23 | choices=[ 24 | 'lead_name', 25 | 'contact_name', 26 | 'email', 27 | 'phone', 28 | 'url', 29 | 'all', 30 | 'custom', 31 | ], 32 | help="Specify a field to compare uniqueness", 33 | ) 34 | parser.add_argument( 35 | '--custom-field-name', 36 | '-c', 37 | help="Specify the custom field name if you're deduplicating by `custom` field", 38 | ) 39 | args = parser.parse_args() 40 | 41 | # Initialize Close API Wrapper 42 | api = CloseIO_API(args.api_key) 43 | organization = api.get('me')['organizations'][0] 44 | org_id = 
organization['id'] 45 | org_name = organization['name'] 46 | 47 | # Calculate number of slices necessary to get all leads 48 | total_leads = api.get('lead', params={'_limit': 0, 'query': 'sort:created'})[ 49 | 'total_results' 50 | ] 51 | total_slices = int(math.ceil(float(total_leads) / 1000)) 52 | slices = range(1, total_slices + 1) 53 | leads = [] 54 | 55 | 56 | # Write data to a CSV 57 | def write_to_csv_file(type_name, items, ordered_keys): 58 | print("Writing data to CSV...") 59 | f = open( 60 | f'{org_name.replace("/", " ")} {type_name} Duplicates.csv', 61 | 'w', 62 | newline='', 63 | encoding='utf-8', 64 | ) 65 | try: 66 | writer = csv.DictWriter(f, ordered_keys) 67 | writer.writeheader() 68 | writer.writerows(items) 69 | finally: 70 | f.close() 71 | 72 | 73 | # Get leads for each slice 74 | lead_params_fields = [ 75 | 'id', 76 | 'display_name', 77 | 'contacts', 78 | 'status_label', 79 | 'date_created', 80 | 'url', 81 | ] 82 | if args.field == 'custom': 83 | lead_params_fields += ['custom'] 84 | 85 | if not args.custom_field_name: 86 | print( 87 | f"You need to provide custom field name while deduplicating by `custom`. Exiting..." 88 | ) 89 | exit(1) 90 | 91 | 92 | def get_leads_slice(slice_num): 93 | print(f"Getting lead slice {slice_num} of {total_slices}...") 94 | has_more = True 95 | offset = 0 96 | while has_more: 97 | resp = api.get( 98 | 'lead', 99 | params={ 100 | '_skip': offset, 101 | 'query': 'sort:created slice:%s/%s' 102 | % (slice_num, total_slices), 103 | '_fields': ','.join(lead_params_fields), 104 | }, 105 | ) 106 | leads.extend(resp['data']) 107 | 108 | offset += len(resp['data']) 109 | has_more = resp['has_more'] 110 | 111 | 112 | # Add to a list of duplicates for lead names 113 | def get_duplicates_for_lead_name(lead_name): 114 | for dupe in lead_names[lead_name]: 115 | lead_name_duplicates.append( 116 | { 117 | 'Lead Name': dupe['display_name'], 118 | 'Status Label': dupe['status_label'], 119 | 'Lead ID': dupe['id'], 120 | 'Lead Date Created': dupe['date_created'], 121 | 'Close URL': 'https://app.close.com/lead/%s/' % dupe['id'], 122 | } 123 | ) 124 | print( 125 | f"{(keys_with_dupes_lead_name.index(lead_name) + 1)} of {len(keys_with_dupes_lead_name)}: {lead_name}" 126 | ) 127 | 128 | 129 | def get_duplicates_for_custom_field(custom_field_value): 130 | custom_field_name = args.custom_field_name 131 | 132 | for dupe in custom_fields[custom_field_value]: 133 | custom_field_duplicates.append( 134 | { 135 | f'custom.{custom_field_name}': custom_field_value, 136 | 'Lead Name': dupe['display_name'], 137 | 'Status Label': dupe['status_label'], 138 | 'Lead ID': dupe['id'], 139 | 'Lead Date Created': dupe['date_created'], 140 | 'Close URL': 'https://app.close.com/lead/%s/' % dupe['id'], 141 | } 142 | ) 143 | print( 144 | f"{(keys_with_dupes_custom_field.index(custom_field_value) + 1)} of {len(keys_with_dupes_custom_field)}: {custom_field_value}" 145 | ) 146 | 147 | 148 | # Add to a list of duplicates for contact emails 149 | def get_duplicates_for_email(email): 150 | for dupe in emails[email]: 151 | email_duplicates.append( 152 | { 153 | 'Email Address': email, 154 | 'Lead Name': dupe['display_name'], 155 | 'Status Label': dupe['status_label'], 156 | 'Lead ID': dupe['id'], 157 | 'Lead Date Created': dupe['date_created'], 158 | 'Close URL': 'https://app.close.com/lead/%s/' % dupe['id'], 159 | } 160 | ) 161 | print( 162 | f"{(keys_with_dupes_email.index(email) + 1)} of {len(keys_with_dupes_email)}: {email}" 163 | ) 164 | 165 | 166 | # Add to a list of duplicates for 
contact names 167 | def get_duplicates_for_contact_name(contact_name): 168 | for dupe in contact_names[contact_name]: 169 | contact_name_duplicates.append( 170 | { 171 | 'Contact Name': contact_name, 172 | 'Lead Name': dupe['display_name'], 173 | 'Status Label': dupe['status_label'], 174 | 'Lead ID': dupe['id'], 175 | 'Lead Date Created': dupe['date_created'], 176 | 'Close URL': 'https://app.close.com/lead/%s/' % dupe['id'], 177 | } 178 | ) 179 | print( 180 | f"{(keys_with_dupes_contact_name.index(contact_name) + 1)} of {len(keys_with_dupes_contact_name)}: {contact_name}" 181 | ) 182 | 183 | 184 | # Add to a list of duplicates for contact phones 185 | def get_duplicates_for_phone(phone): 186 | for dupe in phones[phone]: 187 | phone_duplicates.append( 188 | { 189 | 'Phone Number': phone, 190 | 'Lead Name': dupe['display_name'], 191 | 'Status Label': dupe['status_label'], 192 | 'Lead ID': dupe['id'], 193 | 'Lead Date Created': dupe['date_created'], 194 | 'Close URL': 'https://app.close.com/lead/%s/' % dupe['id'], 195 | } 196 | ) 197 | print( 198 | f"{(keys_with_dupes_phone.index(phone) + 1)} of {len(keys_with_dupes_phone)}: {phone}" 199 | ) 200 | 201 | 202 | # Add to a list of duplicates for lead URLs 203 | def get_duplicates_for_url(url): 204 | for dupe in urls[url]: 205 | url_duplicates.append( 206 | { 207 | 'URL Hostname': url, 208 | 'Lead Name': dupe['display_name'], 209 | 'Status Label': dupe['status_label'], 210 | 'Lead ID': dupe['id'], 211 | 'Lead Date Created': dupe['date_created'], 212 | 'Close URL': 'https://app.close.com/lead/%s/' % dupe['id'], 213 | } 214 | ) 215 | print( 216 | f"{(keys_with_dupes_url.index(url) + 1)} of {len(keys_with_dupes_url)}: {url}" 217 | ) 218 | 219 | 220 | print("Getting Leads...") 221 | pool.map(get_leads_slice, slices) 222 | leads = sorted(leads, key=itemgetter('date_created')) 223 | 224 | # Process duplicates 225 | lead_names = {} 226 | custom_fields = {} 227 | contact_names = {} 228 | emails = {} 229 | phones = {} 230 | urls = {} 231 | keys_with_dupes_lead_name = [] 232 | keys_with_dupes_custom_field = [] 233 | keys_with_dupes_contact_name = [] 234 | keys_with_dupes_email = [] 235 | keys_with_dupes_phone = [] 236 | keys_with_dupes_url = [] 237 | 238 | for lead in leads: 239 | if args.field in ['all', 'lead_name']: 240 | # Populate a dictionary of duplicate lead names, and keep track of those that appear more than once 241 | lower_name = lead['display_name'].strip().lower() 242 | if lead_names.get(lower_name) and lead not in lead_names[lower_name]: 243 | lead_names[lower_name].append(lead) 244 | keys_with_dupes_lead_name.append(lower_name) 245 | elif not lead_names.get(lower_name): 246 | lead_names[lower_name] = [lead] 247 | 248 | if args.field == 'custom': 249 | custom_field_value = lead['custom'].get(args.custom_field_name) 250 | if isinstance(custom_field_value, list): 251 | custom_field_value = ','.join(custom_field_value) 252 | 253 | if custom_field_value: 254 | if ( 255 | custom_fields.get(custom_field_value) 256 | and lead not in custom_fields[custom_field_value] 257 | ): 258 | custom_fields[custom_field_value].append(lead) 259 | keys_with_dupes_custom_field.append(custom_field_value) 260 | elif not custom_fields.get(custom_field_value): 261 | custom_fields[custom_field_value] = [lead] 262 | 263 | if args.field in ['all', 'url']: 264 | # Populate a dictionary of duplicate lead urls, and keep track of those that appear more than once 265 | if lead.get('url'): 266 | host_name = urlparse(lead['url']).hostname.lower() 267 | if urls.get(host_name) 
and lead not in urls[host_name]: 268 | urls[host_name].append(lead) 269 | keys_with_dupes_url.append(host_name) 270 | elif not urls.get(host_name): 271 | urls[host_name] = [lead] 272 | 273 | if args.field in ['all', 'email', 'phone', 'contact_name']: 274 | for contact in lead['contacts']: 275 | if args.field in ['all', 'contact_name']: 276 | if not contact['name']: 277 | continue 278 | 279 | contact_name = contact['name'].strip().lower() 280 | if ( 281 | contact_names.get(contact_name) 282 | and lead not in contact_names[contact_name] 283 | ): 284 | contact_names[contact_name].append(lead) 285 | keys_with_dupes_contact_name.append(contact_name) 286 | elif not contact_names.get(contact_name): 287 | contact_names[contact_name] = [lead] 288 | 289 | # Populate a dictionary of emails, and keep track of those that appear more than once 290 | if args.field in ['all', 'email']: 291 | for email in contact['emails']: 292 | if ( 293 | emails.get(email['email']) 294 | and lead not in emails[email['email']] 295 | ): 296 | emails[email['email']].append(lead) 297 | keys_with_dupes_email.append(email['email']) 298 | elif not emails.get(email['email']): 299 | emails[email['email']] = [lead] 300 | 301 | # Populate a dictionary of phones, and keep track of those that appear more than once 302 | if args.field in ['all', 'phone']: 303 | for phone in contact['phones']: 304 | if ( 305 | phones.get(phone['phone']) 306 | and lead not in phones[phone['phone']] 307 | ): 308 | phones[phone['phone']].append(lead) 309 | keys_with_dupes_phone.append(phone['phone']) 310 | elif not phones.get(phone['phone']): 311 | phones[phone['phone']] = [lead] 312 | 313 | if args.field in ['all', 'lead_name']: 314 | lead_name_duplicates = [] 315 | print("Getting lead name duplicate data...") 316 | keys_with_dupes_lead_name = list(set(keys_with_dupes_lead_name)) 317 | pool.map(get_duplicates_for_lead_name, keys_with_dupes_lead_name) 318 | 319 | # Sort the duplicates alphabetically and write them to a CSV 320 | lead_name_duplicates = sorted( 321 | lead_name_duplicates, key=itemgetter('Lead Name') 322 | ) 323 | write_to_csv_file( 324 | "Lead Name", 325 | lead_name_duplicates, 326 | [ 327 | 'Lead Name', 328 | 'Status Label', 329 | 'Lead Date Created', 330 | 'Lead ID', 331 | 'Close URL', 332 | ], 333 | ) 334 | 335 | if args.field == 'custom': 336 | custom_field_name = args.custom_field_name 337 | 338 | custom_field_duplicates = [] 339 | print(f"Getting custom field `{custom_field_name}` duplicate data...") 340 | keys_with_dupes_custom_field = list(set(keys_with_dupes_custom_field)) 341 | pool.map(get_duplicates_for_custom_field, keys_with_dupes_custom_field) 342 | 343 | # Sort the duplicates alphabetically and write them to a CSV 344 | custom_field_duplicates = sorted( 345 | custom_field_duplicates, key=itemgetter(f'custom.{custom_field_name}') 346 | ) 347 | write_to_csv_file( 348 | f'Custom - {custom_field_name}', 349 | custom_field_duplicates, 350 | [ 351 | f'custom.{custom_field_name}', 352 | 'Lead Name', 353 | 'Status Label', 354 | 'Lead Date Created', 355 | 'Lead ID', 356 | 'Close URL', 357 | ], 358 | ) 359 | 360 | if args.field in ['all', 'email']: 361 | email_duplicates = [] 362 | print("Getting email duplicate data...") 363 | keys_with_dupes_email = list(set(keys_with_dupes_email)) 364 | pool.map(get_duplicates_for_email, keys_with_dupes_email) 365 | 366 | # Sort the duplicates alphabetically and write them to a CSV 367 | email_duplicates = sorted( 368 | email_duplicates, key=itemgetter('Email Address') 369 | ) 370 | write_to_csv_file( 
371 | "Email", 372 | email_duplicates, 373 | [ 374 | 'Email Address', 375 | 'Lead Name', 376 | 'Status Label', 377 | 'Lead Date Created', 378 | 'Lead ID', 379 | 'Close URL', 380 | ], 381 | ) 382 | 383 | if args.field in ['all', 'contact_name']: 384 | contact_name_duplicates = [] 385 | print("Getting contact duplicate data...") 386 | keys_with_dupes_contact_name = list(set(keys_with_dupes_contact_name)) 387 | pool.map(get_duplicates_for_contact_name, keys_with_dupes_contact_name) 388 | 389 | # Sort the duplicates alphabetically and write them to a CSV 390 | contact_name_duplicates = sorted( 391 | contact_name_duplicates, key=itemgetter('Contact Name') 392 | ) 393 | write_to_csv_file( 394 | "Contact Name", 395 | contact_name_duplicates, 396 | [ 397 | 'Contact Name', 398 | 'Lead Name', 399 | 'Status Label', 400 | 'Lead Date Created', 401 | 'Lead ID', 402 | 'Close URL', 403 | ], 404 | ) 405 | 406 | if args.field in ['all', 'phone']: 407 | phone_duplicates = [] 408 | print("Getting phone duplicate data...") 409 | keys_with_dupes_phone = list(set(keys_with_dupes_phone)) 410 | pool.map(get_duplicates_for_phone, keys_with_dupes_phone) 411 | 412 | # Sort the duplicates alphabetically and write them to a CSV 413 | phone_duplicates = sorted(phone_duplicates, key=itemgetter('Phone Number')) 414 | write_to_csv_file( 415 | "Phone", 416 | phone_duplicates, 417 | [ 418 | 'Phone Number', 419 | 'Lead Name', 420 | 'Status Label', 421 | 'Lead Date Created', 422 | 'Lead ID', 423 | 'Close URL', 424 | ], 425 | ) 426 | 427 | if args.field in ['all', 'url']: 428 | url_duplicates = [] 429 | print("Getting URL duplicate data...") 430 | keys_with_dupes_url = list(set(keys_with_dupes_url)) 431 | pool.map(get_duplicates_for_url, keys_with_dupes_url) 432 | 433 | # Sort the duplicates alphabetically and write them to a CSV 434 | url_duplicates = sorted(url_duplicates, key=itemgetter('URL Hostname')) 435 | write_to_csv_file( 436 | "URL", 437 | url_duplicates, 438 | [ 439 | 'URL Hostname', 440 | 'Lead Name', 441 | 'Status Label', 442 | 'Lead Date Created', 443 | 'Lead ID', 444 | 'Close URL', 445 | ], 446 | ) 447 | -------------------------------------------------------------------------------- /scripts/import_leads_from_close_json.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import copy 3 | import json 4 | 5 | import gevent.monkey 6 | gevent.monkey.patch_all() 7 | 8 | from closeio_api import APIError, Client as CloseIO_API 9 | from gevent.pool import Pool 10 | 11 | 12 | 13 | parser = argparse.ArgumentParser( 14 | description='Import Close Leads from a Close JSON file into a New Org' 15 | ) 16 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 17 | parser.add_argument('--jsonfile', '-j', required=True, help='JSON File Path') 18 | args = parser.parse_args() 19 | api = CloseIO_API(args.api_key) 20 | 21 | # Create a list of active users for the sake of posting opps and activities. 
22 | me = api.get('me') 23 | org = api.get( 24 | 'organization/' + me['organizations'][0]['id'], 25 | params={'_fields': 'memberships,inactive_memberships,name'}, 26 | ) 27 | org_name = org['name'] 28 | active_users = [i['user_id'] for i in org['memberships']] 29 | all_users = active_users + [i['user_id'] for i in org['inactive_memberships']] 30 | apikey_user_id = me['id'] 31 | 32 | # Create a list of lead and opportunity statuses currently in the org 33 | lead_statuses = api.get('status/lead')['data'] 34 | lead_status_labels = [i['label'] for i in lead_statuses] 35 | opportunity_statuses = api.get('status/opportunity')['data'] 36 | opportunity_status_labels = [i['label'] for i in opportunity_statuses] 37 | 38 | # Array to keep track of number of leads restored. Because we use gevent, we can't have a standard counter variable. 39 | total_leads_imported = [] 40 | 41 | # Array to keep track of leads that could not be posted. 42 | errored_leads = [] 43 | 44 | # Read in data file taken from args 45 | with open(args.jsonfile) as data_file: 46 | data = json.load(data_file) 47 | 48 | 49 | # Make sure all statuses in the JSON file exist in Close before continuing 50 | def postStatus(lead_or_opp, label, status_type): 51 | status_data = {'label': label} 52 | if lead_or_opp == 'opportunity': 53 | status_data['type'] = status_type 54 | try: 55 | api.post('status/' + lead_or_opp, data=status_data) 56 | except APIError as e: 57 | print(f"Cannot add status {label} to org because {str(e)}") 58 | 59 | 60 | # Make sure all lead and opp statuses are in Close 61 | lead_statuses_labels_in_json = [ 62 | i['status_label'] 63 | for i in data 64 | if i['status_label'] not in lead_status_labels 65 | ] 66 | lead_statuses_labels_in_json = list(set(lead_statuses_labels_in_json)) 67 | for label in lead_statuses_labels_in_json: 68 | postStatus('lead', label, None) 69 | lead_status_labels.append(label) 70 | 71 | for d in data: 72 | for opp in d['opportunities']: 73 | if opp['status_label'] not in opportunity_status_labels: 74 | postStatus('opportunity', opp['status_label'], opp['status_type']) 75 | opportunity_status_labels.append(opp['status_label']) 76 | 77 | # This is a dictionary that stores a mapping between old contact ids and new contact ids for restoration purposes. 
78 | contact_id_mapping = {} 79 | 80 | 81 | # Import opps to the new lead 82 | def importOpportunities(opp_data, new_lead_id): 83 | for opp in opp_data: 84 | del opp['id'] 85 | if 'organization_id' in opp: 86 | del opp['organization_id'] 87 | if opp['user_id'] not in active_users: 88 | opp['user_id'] = apikey_user_id 89 | if ( 90 | 'contact_id' in opp 91 | and opp['contact_id'] != None 92 | and opp['contact_id'] in contact_id_mapping 93 | ): 94 | opp['contact_id'] = contact_id_mapping[opp['contact_id']] 95 | opp['status'] = opp['status_label'] 96 | del opp['status_id'] 97 | del opp['status_label'] 98 | opp['lead_id'] = new_lead_id 99 | try: 100 | api.post('opportunity', data=opp) 101 | except APIError as e: 102 | print(f"Could not post opp to {new_lead_id} because {str(e)}") 103 | 104 | 105 | # Import tasks to the new lead 106 | def importTasks(task_data, new_lead_id): 107 | for task in task_data: 108 | del task['id'] 109 | if 'organization_id' in task: 110 | del task['organization_id'] 111 | if task['assigned_to'] not in active_users: 112 | task['assigned_to'] = apikey_user_id 113 | task['lead_id'] = new_lead_id 114 | try: 115 | api.post('task', data=task) 116 | except APIError as e: 117 | print(f"Could not post task to {new_lead_id} because {str(e)}") 118 | 119 | 120 | # Import call, note, and SMS data to new lead. Assume that emails will be brought over via email sync. 121 | def importActivities(activity_data, new_lead_id): 122 | types = { 123 | 'Call': 'activity/call', 124 | 'SMS': 'activity/sms', 125 | 'Note': 'activity/note', 126 | } 127 | for activity in activity_data: 128 | if 'organization_id' in activity: 129 | del activity['organization_id'] 130 | activity['lead_id'] = new_lead_id 131 | if ( 132 | 'contact_id' in activity 133 | and activity['contact_id'] != None 134 | and activity['contact_id'] in contact_id_mapping 135 | ): 136 | activity['contact_id'] = contact_id_mapping[activity['contact_id']] 137 | if activity['_type'] == 'Call': 138 | if 'quality_info' in activity: 139 | del activity['quality_info'] 140 | activity['source'] = 'External' 141 | if activity['_type'] == 'SMS' and activity['status'] in [ 142 | 'outbox', 143 | 'scheduled', 144 | ]: 145 | activity['status'] = 'draft' 146 | try: 147 | api.post(types[activity['_type']], data=activity) 148 | except APIError as e: 149 | print( 150 | f"Could not post {activity['_type']} activity to {new_lead_id} because {str(e)}" 151 | ) 152 | 153 | 154 | # Remove task completed activities from top of lead. 
155 | def removeTaskCompletedActivities(new_lead_id): 156 | has_more = True 157 | offset = 0 158 | task_completed_ids = [] 159 | while has_more: 160 | resp_task_completed = api.get( 161 | 'activity/task_completed', 162 | params={'_skip': offset, 'lead_id': new_lead_id, '_fields': 'id'}, 163 | ) 164 | task_completed_ids = [i['id'] for i in resp_task_completed['data']] 165 | offset += len(resp_task_completed['data']) 166 | has_more = resp_task_completed['has_more'] 167 | 168 | for completed_id in task_completed_ids: 169 | try: 170 | api.delete('activity/task_completed/' + completed_id) 171 | except APIError as e: 172 | print( 173 | f"Cannot delete completed task activity {completed_id} because {str(e)}" 174 | ) 175 | 176 | 177 | def restoreLead(lead): 178 | lead_data = {} 179 | lead_data['status'] = lead['status_label'] 180 | lead_data['name'] = lead['display_name'] 181 | lead_data['date_created'] = lead['date_created'] 182 | lead_data['created_by'] = lead['created_by'] 183 | lead_data['url'] = lead['url'] 184 | lead_data['description'] = lead['description'] 185 | 186 | # Clear users ids that have never been in the new Close org from user type custom fields: 187 | custom_data = copy.deepcopy(lead['custom']) 188 | for custom in lead['custom']: 189 | if ( 190 | lead['custom'].get(custom) 191 | and str(lead['custom'][custom]).startswith('user_') 192 | and lead['custom'][custom] not in all_users 193 | ): 194 | del custom_data[custom] 195 | lead_data['custom'] = custom_data 196 | lead_data['custom']['Original Lead ID'] = lead['id'] 197 | 198 | # Remove lead references from old contacts before posting to new leads 199 | contacts = copy.deepcopy(lead['contacts']) 200 | for contact in contacts: 201 | del contact['id'] 202 | del contact['lead_id'] 203 | lead_data['contacts'] = contacts 204 | 205 | # Post New Lead. 206 | try: 207 | post_lead = api.post('lead', data=lead_data) 208 | if 'id' in post_lead: 209 | new_lead_id = post_lead['id'] 210 | # Create contact mapping dictionary 211 | for i in range(0, len(lead['contacts'])): 212 | contact_id_mapping[lead['contacts'][i]['id']] = post_lead[ 213 | 'contacts' 214 | ][i]['id'] 215 | # Import Opportunities 216 | if 'opportunities' in lead and len(lead['opportunities']) > 0: 217 | importOpportunities(lead['opportunities'], new_lead_id) 218 | if 'tasks' in lead and len(lead['tasks']) > 0: 219 | importTasks(lead['tasks'], new_lead_id) 220 | # We want to remove task completed activities on the new lead because they will be posted at the top of the activity timeline 221 | # regardless of when they were actually completed. 222 | removeTaskCompletedActivities(new_lead_id) 223 | # Import Call, SMS, and Note data. 
We assume email data will be transferred over automatically 224 | if 'activities' in lead and len(lead['activities']) > 0: 225 | activity_array = [ 226 | i 227 | for i in lead['activities'] 228 | if i['_type'] in ['Call', 'Note', 'SMS'] 229 | ] 230 | importActivities(activity_array, new_lead_id) 231 | total_leads_imported.append(new_lead_id) 232 | print(f"{len(total_leads_imported)}: Imported {lead['id']}") 233 | 234 | except Exception as e: 235 | print(f"{lead['id']}: Lead could not be posted because {str(e)}") 236 | errored_leads.append(lead) 237 | 238 | 239 | print(f"Total leads being restored: {len(data)}") 240 | pool = Pool(5) 241 | pool.map(restoreLead, data) 242 | print(f"Total leads restored {len(total_leads_imported)}") 243 | print(f"Total leads not restored {(len(data) - len(total_leads_imported))}") 244 | 245 | # Write errored lead_ids to JSON File 246 | if len(errored_leads) > 0: 247 | with open( 248 | f'{org_name} Errored Leads from JSON Import.json', 'w' 249 | ) as outfile: 250 | json.dump(errored_leads, outfile, indent=4) 251 | -------------------------------------------------------------------------------- /scripts/move_custom_field_to_contact_info.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import click 3 | from closeio_api import APIError, Client as CloseIO_API 4 | 5 | 6 | @click.command() 7 | @click.option('-k', '--api-key', required=True, help='API key') 8 | @click.option( 9 | '--confirmed', 10 | is_flag=True, 11 | help='Without this flag, the script will do a dry run without actually updating any data.', 12 | ) 13 | @click.option( 14 | '--use_existing_contact', 15 | is_flag=True, 16 | help='Append the phone number from a custom field to an existing contact. If this flag is not used, a new contact will be created.', 17 | ) 18 | @click.option( 19 | '--new_contact_name', 20 | default='', 21 | help="If --use_existing_contact flag was not set, or if a lead doesn't contain any contacts, this is the name of the contact that will be created.", 22 | ) 23 | @click.option( 24 | '--phones_custom_field', 25 | default='all phones', 26 | help='Name of the custom field containing phones that should be moved into a contact.', 27 | ) 28 | @click.option( 29 | '--emails_custom_field', 30 | default='all emails', 31 | help='Name of the custom field containing emails that should be moved into a contact.', 32 | ) 33 | @click.option( 34 | '--title_custom_field', 35 | default='contact title', 36 | help='Name of the custom field containing a contact\'s title.', 37 | ) 38 | def run( 39 | api_key, 40 | confirmed, 41 | use_existing_contact=False, 42 | new_contact_name='', 43 | phones_custom_field='all phones', 44 | emails_custom_field='all emails', 45 | title_custom_field='contact title', 46 | ): 47 | """ 48 | After an import from a different CRM, for all leads, move emails and phones that were put 49 | in a lead custom field to the lead's first contact (if --use_existing_contact flag was used) 50 | or create a new contact. 
51 | """ 52 | 53 | print(f'confirmed: {confirmed}') 54 | print(f'phones_custom_field: {phones_custom_field}') 55 | print(f'emails_custom_field: {emails_custom_field}') 56 | print(f'title_custom_field: {title_custom_field}') 57 | print(f'use_existing_contact: {use_existing_contact}') 58 | 59 | api = CloseIO_API(api_key) 60 | has_more = True 61 | offset = 0 62 | 63 | while has_more: 64 | 65 | # Get a page of leads 66 | resp = api.get( 67 | 'lead', 68 | params={ 69 | 'query': '"custom.Source CRM":* not "custom.Migration completed":* sort:created', 70 | '_skip': offset, 71 | '_fields': 'id,display_name,name,contacts,custom', 72 | }, 73 | ) 74 | leads = resp['data'] 75 | 76 | for lead in leads: 77 | contacts = lead['contacts'] 78 | custom = lead['custom'] 79 | 80 | company_emails = custom.get(emails_custom_field, '') 81 | company_phones = custom.get(phones_custom_field, '') 82 | contact_title = custom.get(title_custom_field, '') 83 | 84 | if not company_phones and not company_emails and not contact_title: 85 | continue 86 | 87 | if company_emails: 88 | if company_emails.startswith('["'): 89 | company_emails = company_emails[2:-2].split('", "') 90 | else: 91 | company_emails = [company_emails] 92 | 93 | if company_phones: 94 | if company_phones.startswith('["'): 95 | company_phones = company_phones[2:-2].split('", "') 96 | else: 97 | company_phones = [company_phones] 98 | 99 | if contacts and use_existing_contact: 100 | contact = contacts[0] 101 | else: 102 | contact = {'lead_id': lead['id'], 'phones': [], 'emails': []} 103 | if new_contact_name: 104 | contact['name'] = new_contact_name 105 | 106 | for pn in company_phones: 107 | contact['phones'].append({'type': 'office', 'phone': pn}) 108 | for e in company_emails: 109 | contact['emails'].append({'type': 'office', 'email': e}) 110 | if contact_title: 111 | contact['title'] = contact_title 112 | 113 | print('Lead:', lead['id'], lead['name'].encode('utf8')) 114 | print( 115 | f'Emails: {custom.get(emails_custom_field)} => {company_emails}' 116 | ) 117 | print( 118 | f'Phones: {custom.get(phones_custom_field)} => {company_phones}' 119 | ) 120 | print( 121 | f'Title: {custom.get(title_custom_field)} => {contact_title}' 122 | ) 123 | 124 | try: 125 | if contact.get('id'): 126 | print('Updating an existing contact', contact['id']) 127 | if confirmed: 128 | api.put( 129 | 'contact/%s' % contact['id'], 130 | data={ 131 | 'phones': contact['phones'], 132 | 'emails': contact['emails'], 133 | }, 134 | ) 135 | else: 136 | print('Creating a new contact') 137 | if confirmed: 138 | api.post('contact', data=contact) 139 | print('Payload:', contact) 140 | if confirmed: 141 | api.put( 142 | 'lead/%s' % lead['id'], 143 | data={'custom.Migration completed': 'Yes'}, 144 | ) 145 | except APIError as e: 146 | print(str(e)) 147 | print('Payload:', contact) 148 | if confirmed: 149 | api.put( 150 | 'lead/%s' % lead['id'], 151 | data={'custom.Migration completed': 'skipped'}, 152 | ) 153 | 154 | print() 155 | 156 | if not confirmed: 157 | # If we don't actually update the "Migration completed" custom field, 158 | # we need to paginate 159 | offset += len(leads) 160 | 161 | has_more = resp['has_more'] 162 | 163 | print('Done') 164 | 165 | 166 | if __name__ == '__main__': 167 | run() 168 | -------------------------------------------------------------------------------- /scripts/restore_deleted_leads.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | import gevent.monkey 4 | 5 | gevent.monkey.patch_all() 6 | 
from closeio_api import APIError, Client as CloseIO_API 7 | from gevent.pool import Pool 8 | 9 | parser = argparse.ArgumentParser( 10 | description='Restore an array of deleted leads by ID. This CANNOT restore status changes or call recordings.' 11 | ) 12 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 13 | group = parser.add_mutually_exclusive_group(required=True) 14 | group.add_argument( 15 | '--leads', help='List of lead IDs in a form of a comma separated list' 16 | ) 17 | group.add_argument( 18 | '--leads-file', 19 | help='List of lead IDs in a form of a textual file with single column of lead IDs', 20 | ) 21 | args = parser.parse_args() 22 | api = CloseIO_API(args.api_key) 23 | 24 | # Array of Lead IDs. Add the IDs you want to restore here. 25 | if args.leads: 26 | lead_ids = args.leads.split(",") 27 | elif args.leads_file: 28 | with open(args.leads_file) as f: 29 | lines = f.readlines() 30 | lead_ids = [el.strip() for el in lines] # Strip new lines 31 | lead_ids = list(filter(None, lead_ids)) # Strip empty lines 32 | 33 | # Create a list of active users for the sake of posting opps. 34 | org_id = api.get('me')['organizations'][0]['id'] 35 | memberships = api.get( 36 | 'organization/' + org_id, params={'_fields': 'memberships'} 37 | )['memberships'] 38 | active_users = [i['user_id'] for i in memberships] 39 | 40 | # Array to keep track of number of leads restored. Because we use gevent, we can't have a standard counter variable. 41 | total_leads_restored = [] 42 | 43 | # This is a list of object types you want to restore on the lead. We can also add activity.email, but in this script 44 | # it's assumed that email sync will take care of all of the emails that were deleted, assuming the same email accounts 45 | # are connected to Close. 46 | object_types = [ 47 | 'contact', 48 | 'opportunity', 49 | 'task.lead', 50 | 'activity.call', 51 | 'activity.note', 52 | 'activity.sms', 53 | ] 54 | 55 | # This is a dictionary that stores a mapping between old contact ids and new contact ids for restoration purposes. 56 | contact_id_mapping = {} 57 | 58 | 59 | def restore_objects(object_type, old_lead_id, new_lead_id): 60 | has_more = True 61 | cursor = '' 62 | while has_more: 63 | resp_objects = api.get( 64 | 'event', 65 | params={ 66 | 'object_type': object_type, 67 | 'action': 'deleted', 68 | '_cursor': cursor, 69 | 'lead_id': old_lead_id, 70 | }, 71 | ) 72 | for event in resp_objects['data']: 73 | old_contact_id = None 74 | if 'previous_data' in event: 75 | prev = event['previous_data'] 76 | if 'id' in prev: 77 | del prev['id'] 78 | 79 | # Map old contact ID to new contact ID 80 | if 'contact_id' in prev: 81 | if prev['contact_id'] in contact_id_mapping: 82 | prev['contact_id'] = contact_id_mapping[ 83 | prev['contact_id'] 84 | ] 85 | else: 86 | del prev['contact_id'] 87 | 88 | # Delete quality_info when posting a call 89 | if 'quality_info' in prev: 90 | del prev['quality_info'] 91 | 92 | # Set call source to External 93 | if object_type == 'activity.call': 94 | prev['source'] = 'External' 95 | 96 | # If the user assigned to the opp is no longer in the organization, we still want to post the opp, we just 97 | # can't have it assigned to that user_id. 98 | if ( 99 | object_type == 'opportunity' 100 | and 'user_id' in prev 101 | and prev['user_id'] not in active_users 102 | ): 103 | del prev['user_id'] 104 | 105 | # If anything was in outbox or scheduled, switch it to draft so it doesn't send accidentally at the wrong time. 
106 | if object_type in ['activity.sms', 'activity.call'] and prev[ 107 | 'status' 108 | ] in ['outbox', 'scheduled']: 109 | prev['status'] = 'draft' 110 | 111 | # Set endpoint for posting. We need to change the activity and task object types to match the post endpoint 112 | # for their respective types. 113 | endpoint = object_type 114 | if 'activity' in endpoint: 115 | endpoint = endpoint.replace('.', '/') 116 | elif '.lead' in endpoint: 117 | endpoint = endpoint.replace('.lead', '') 118 | 119 | prev['lead_id'] = new_lead_id 120 | 121 | # Post the object to the new lead. 122 | try: 123 | post_request = api.post(endpoint, data=prev) 124 | 125 | # If we posted a contact, add the new contact id to the dictionary. 126 | if object_type == 'contact': 127 | contact_id_mapping[event['object_id']] = post_request[ 128 | 'id' 129 | ] 130 | except APIError as e: 131 | print( 132 | f"ERROR: Could not post {object_type} {event['object_id']} because {str(e)}" 133 | ) 134 | cursor = resp_objects['cursor_next'] 135 | has_more = bool(resp_objects['cursor_next']) 136 | 137 | 138 | def remove_task_completed_activities(new_lead_id): 139 | has_more = True 140 | offset = 0 141 | task_completed_ids = [] 142 | while has_more: 143 | resp_task_completed = api.get( 144 | 'activity/task_completed', 145 | params={'_skip': offset, 'lead_id': new_lead_id, '_fields': 'id'}, 146 | ) 147 | task_completed_ids = [i['id'] for i in resp_task_completed['data']] 148 | offset += len(resp_task_completed['data']) 149 | has_more = resp_task_completed['has_more'] 150 | 151 | for completed_id in task_completed_ids: 152 | try: 153 | api.delete('activity/task_completed/' + completed_id) 154 | except APIError as e: 155 | print( 156 | f"Cannot delete completed task activity {completed_id} because {str(e)}" 157 | ) 158 | 159 | 160 | def restore_lead(old_lead_id): 161 | resp_lead = api.get( 162 | 'event', 163 | params={ 164 | 'object_type': 'lead', 165 | 'action': 'deleted', 166 | 'lead_id': old_lead_id, 167 | }, 168 | ) 169 | if len(resp_lead['data']) > 0 and resp_lead['data'][0].get( 170 | 'previous_data' 171 | ): 172 | prev = resp_lead['data'][0]['previous_data'] 173 | if 'id' in prev: 174 | del prev['id'] 175 | # Post New Lead. 176 | try: 177 | post_lead = api.post('lead', data=prev) 178 | if 'id' in post_lead: 179 | new_lead_id = post_lead['id'] 180 | # Restore all objects on the lead. 181 | for object_type in object_types: 182 | restore_objects(object_type, old_lead_id, new_lead_id) 183 | 184 | # We want to remove task completed activities on the new lead because they will be posted at the top of the activity timeline 185 | # regardless of when they were actually completed. 
186 | remove_task_completed_activities(new_lead_id) 187 | 188 | total_leads_restored.append(1) 189 | print(f"{len(total_leads_restored)}: Restored {old_lead_id}") 190 | except APIError as e: 191 | print(f"{old_lead_id}: Lead could not be posted because {str(e)}") 192 | else: 193 | print( 194 | f"{old_lead_id} could not be restored because there is no data to restore" 195 | ) 196 | 197 | 198 | print(f"Total leads being restored: {len(lead_ids)}") 199 | pool = Pool(5) 200 | pool.map(restore_lead, lead_ids) 201 | print(f"Total leads restored {len(total_leads_restored)}") 202 | print( 203 | f"Total leads not restored {(len(lead_ids) - len(total_leads_restored))}" 204 | ) 205 | -------------------------------------------------------------------------------- /scripts/run_leads_deleted_report.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | 4 | from closeio_api import Client as CloseIO_API 5 | 6 | parser = argparse.ArgumentParser( 7 | description='Create a CSV of all deleted leads in the past 30 days and see how they were deleted' 8 | ) 9 | 10 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 11 | parser.add_argument( 12 | '--print-lead-ids', 13 | '-p', 14 | action='store_true', 15 | help='Use this field to print lead_ids deleted in an array at the end of the script', 16 | ) 17 | 18 | args = parser.parse_args() 19 | 20 | api = CloseIO_API(args.api_key) 21 | 22 | has_more = True 23 | cursor = '' 24 | events = [] 25 | leads = [] 26 | reverted_imports = {} 27 | 28 | me = api.get('me') 29 | org_id = me['organizations'][0]['id'] 30 | org = api.get( 31 | f'organization/{org_id}', 32 | params={'_fields': 'name,memberships,inactive_memberships'}, 33 | ) 34 | org_memberships = org['memberships'] + org['inactive_memberships'] 35 | org_name = org['name'] 36 | 37 | memberships = me['memberships'] 38 | assert ( 39 | len(memberships) and memberships[0]['role_id'] == 'admin' 40 | ), 'ERROR: You must be an admin in your Close organization to run this script' 41 | 42 | users = {} 43 | 44 | for member in org_memberships: 45 | users[member['user_id']] = member['user_full_name'] 46 | 47 | print("Getting Leads deleted...") 48 | 49 | while has_more: 50 | resp = api.get( 51 | 'event', 52 | params={'object_type': 'lead', 'action': 'deleted', '_cursor': cursor}, 53 | ) 54 | for event in resp['data']: 55 | if args.print_lead_ids: 56 | leads.append(event['lead_id']) 57 | 58 | event_data = { 59 | 'username': "", 60 | 'date_created': event['date_created'], 61 | 'display_name': event['previous_data']['display_name'], 62 | 'lead_status': event['previous_data']['status_label'], 63 | 'lead_id': event['lead_id'], 64 | 'how_deleted': "", 65 | } 66 | 67 | if 'meta' in event: 68 | if 'bulk_action_id' in event['meta']: 69 | event_data['how_deleted'] = "Bulk Delete via Close (%s)" % ( 70 | event['meta']['bulk_action_id'] 71 | ) 72 | elif 'merge_source_lead_id' in event['meta']: 73 | event_data['how_deleted'] = "Merged with another lead (%s)" % ( 74 | event['meta']['merge_destination_lead_id'] 75 | ) 76 | elif 'revert_import_id' in event['meta']: 77 | event_data[ 78 | 'how_deleted' 79 | ] = "A Close Import Was Reverted (%s)" % ( 80 | event['meta']['revert_import_id'] 81 | ) 82 | if event['meta']['revert_import_id'] not in reverted_imports: 83 | reverted_import_activities = api.get( 84 | 'event', 85 | params={ 86 | 'object_type': 'import', 87 | 'object_id': event['meta']['revert_import_id'], 88 | }, 89 | ) 90 | import_deletions = [ 91 | i 92 
| for i in reverted_import_activities['data'] 93 | if i['action'] == 'deleted' 94 | ] 95 | if ( 96 | len(import_deletions) > 0 97 | and 'user_id' in import_deletions[0] 98 | ): 99 | reverted_imports[ 100 | event['meta']['revert_import_id'] 101 | ] = import_deletions[0]['user_id'] 102 | if event['meta']['revert_import_id'] in reverted_imports: 103 | event_data['username'] = users[ 104 | reverted_imports[event['meta']['revert_import_id']] 105 | ] 106 | else: 107 | event_data[ 108 | 'how_deleted' 109 | ] = "Manually in Close or via a single API Call" 110 | 111 | if ( 112 | 'user_id' in event 113 | and event['user_id'] != None 114 | and event_data['username'] == "" 115 | ): 116 | event_data['username'] = users[event['user_id']] 117 | 118 | events.append(event_data) 119 | print(len(events)) 120 | cursor = resp['cursor_next'] 121 | has_more = bool(cursor) 122 | 123 | f = open( 124 | f'{org_name} Delete Lead Events in 30 Days.csv', 125 | 'w', 126 | newline='', 127 | encoding='utf-8', 128 | ) 129 | try: 130 | writer = csv.writer(f) 131 | writer.writerow( 132 | ( 133 | 'Date', 134 | 'User', 135 | ' Lead Name', 136 | ' Lead Status', 137 | 'Lead ID', 138 | 'How Was Lead Deleted?', 139 | ) 140 | ) 141 | for a in events: 142 | writer.writerow( 143 | ( 144 | '%s' % a['date_created'], 145 | '%s' % a['username'], 146 | '%s' % a['display_name'], 147 | '%s' % a['lead_status'], 148 | '%s' % a['lead_id'], 149 | '%s' % a['how_deleted'], 150 | ) 151 | ) 152 | finally: 153 | f.close() 154 | 155 | if args.print_lead_ids: 156 | print(f"Total Leads: {len(leads)}") 157 | print(leads) 158 | -------------------------------------------------------------------------------- /scripts/run_leads_merged_report.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | 4 | import gevent.monkey 5 | from closeio_api import APIError, Client as CloseIO_API 6 | from gevent.pool import Pool 7 | 8 | gevent.monkey.patch_all() 9 | 10 | parser = argparse.ArgumentParser( 11 | description='Get a list of all lead merge events for the last 30 days from your Close organization' 12 | ) 13 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 14 | args = parser.parse_args() 15 | 16 | # Initialize the Close API and get all users in the org 17 | api = CloseIO_API(args.api_key) 18 | 19 | org_id = api.get('me')['organizations'][0]['id'] 20 | org = api.get( 21 | f'organization/{org_id}', 22 | params={'_fields': 'inactive_memberships,memberships,name'}, 23 | ) 24 | org_name = org['name'].replace('/', '') 25 | 26 | memberships = org['memberships'] + org['inactive_memberships'] 27 | users = { 28 | membership['user_id']: membership['user_full_name'] 29 | for membership in memberships 30 | } 31 | 32 | 33 | # Method to get data about the deleted source lead added to the event 34 | def getSourceLeadData(event): 35 | print( 36 | f"{(events.index(event) + 1)} of {len(events)}: {event['Merge Event ID']}" 37 | ) 38 | source_delete_event = api.get( 39 | 'event', 40 | params={ 41 | 'object_type': 'lead', 42 | 'action': 'deleted', 43 | 'lead_id': event['Source Lead ID'], 44 | }, 45 | ) 46 | if len(source_delete_event['data']) > 0: 47 | delete_event = source_delete_event['data'][0] 48 | if delete_event.get('previous_data'): 49 | event['Source Lead Status'] = delete_event['previous_data'].get( 50 | 'status_label' 51 | ) 52 | event['Source Lead Name'] = delete_event['previous_data'].get( 53 | 'display_name' 54 | ) 55 | 56 | 57 | print("Getting all merge events...") 58 | 59 | 
has_more = True 60 | cursor = '' 61 | events = [] 62 | offset = 0 63 | 64 | # Get all merge events 65 | while has_more: 66 | try: 67 | resp = api.get( 68 | 'event', 69 | params={ 70 | 'object_type': 'lead', 71 | 'action': 'merged', 72 | '_cursor': cursor, 73 | }, 74 | ) 75 | for event in resp['data']: 76 | if ( 77 | event.get('data') 78 | and event.get('meta') 79 | and event['meta'].get('merge_source_lead_id') 80 | ): 81 | event_data = { 82 | 'Current Lead URL': 'https://app.close.com/lead/%s/' 83 | % event['meta']['merge_destination_lead_id'], 84 | 'Date': event['date_created'], 85 | 'Destination Lead Name': event['data']['display_name'], 86 | 'Destination Lead Status': event['data']['status_label'], 87 | 'Destination Lead ID': event['meta'][ 88 | 'merge_destination_lead_id' 89 | ], 90 | 'Source Lead ID': event['meta']['merge_source_lead_id'], 91 | 'Merge Event ID': event['id'], 92 | 'Close API Request ID': event['request_id'], 93 | } 94 | 95 | if event.get('user_id') and event['user_id'] in users: 96 | event_data['User'] = users[event['user_id']] 97 | 98 | events.append(event_data) 99 | cursor = resp['cursor_next'] 100 | has_more = bool(cursor) 101 | offset = len(events) 102 | print(f"Events found: {offset}") 103 | except APIError as e: 104 | print(f"Could not pull data for cursor: {cursor}") 105 | 106 | print("Getting data about the source lead for each merge event...") 107 | pool = Pool(7) 108 | pool.map(getSourceLeadData, events) 109 | 110 | # Write data to a CSV 111 | f = open( 112 | f'{org_name} Merge Lead Events in Last 30 Days.csv', 113 | 'w', 114 | newline='', 115 | encoding='utf-8', 116 | ) 117 | try: 118 | ordered_keys = [ 119 | 'Merge Event ID', 120 | 'Close API Request ID', 121 | 'Date', 122 | 'User', 123 | 'Destination Lead Name', 124 | 'Destination Lead Status', 125 | 'Destination Lead ID', 126 | 'Source Lead Name', 127 | 'Source Lead Status', 128 | 'Source Lead ID', 129 | 'Current Lead URL', 130 | ] 131 | writer = csv.DictWriter(f, ordered_keys) 132 | writer.writeheader() 133 | writer.writerows(events) 134 | finally: 135 | f.close() 136 | -------------------------------------------------------------------------------- /scripts/sample_script.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | from closeio_api import Client as CloseIO_API 4 | 5 | parser = argparse.ArgumentParser( 6 | description='Sample script used to test out whether the environment is set up correctly.' 7 | 'Script will print out the organization name associated with the provided API key.' 8 | ) 9 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 10 | args = parser.parse_args() 11 | 12 | api = CloseIO_API(args.api_key) 13 | organization = api.get("me")["organizations"][0] 14 | print( 15 | f"Close organization associated with this API key is '{organization['name']}'." 
16 | ) 17 | -------------------------------------------------------------------------------- /scripts/time_to_respond_report.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import time 4 | from datetime import datetime, timedelta 5 | 6 | from closeio_api import Client as CloseIO_API 7 | from dateutil import tz 8 | 9 | parser = argparse.ArgumentParser( 10 | description='Get Time To Respond Metrics From Org' 11 | ) 12 | 13 | parser.add_argument('--api-key', '-k', required=True, help='API Key') 14 | parser.add_argument( 15 | '--past-days', 16 | '-p', 17 | required=True, 18 | help='How many days in the past should we start the calculation?', 19 | ) 20 | parser.add_argument( 21 | '--org-count', 22 | '-o', 23 | action='store_true', 24 | help='Use this field if you also want org totals, not just active user totals. Note: Only use this field with short date ranges (i.e. 2 weeks maximum)', 25 | ) 26 | parser.add_argument( 27 | '--user-counts', 28 | '-u', 29 | action='store_true', 30 | help='Get stats per individual user', 31 | ) 32 | 33 | args = parser.parse_args() 34 | 35 | api = CloseIO_API(args.api_key) 36 | 37 | org_id = api.get('me')['organizations'][0]['id'] 38 | org_name = api.get('organization/' + org_id)['name'] 39 | org_memberships = api.get('organization/' + org_id)['memberships'] 40 | 41 | assert ( 42 | args.org_count or args.user_counts 43 | ), 'ERROR: Please include the org count parameter, the user counts parameter, or both' 44 | 45 | assert ( 46 | args.org_count and int(args.past_days) < 15 47 | ) or not args.org_count, 'ERROR: When using the org-count parameter, make sure that the past days parameter is less than 15' 48 | 49 | 50 | def pretty_time_delta(seconds): 51 | seconds = abs(int(seconds)) 52 | days, seconds = divmod(seconds, 86400) 53 | hours, seconds = divmod(seconds, 3600) 54 | minutes, seconds = divmod(seconds, 60) 55 | if days > 0: 56 | return '%dd %dh %dm %ds' % (days, hours, minutes, seconds) 57 | elif hours > 0: 58 | return '%dh %dm %ds' % (hours, minutes, seconds) 59 | elif minutes > 0: 60 | return '%dm %ds' % (minutes, seconds) 61 | else: 62 | return '%ds' % (seconds) 63 | 64 | 65 | tz_off = -time.timezone / 60 / 60 66 | 67 | today = datetime.utcnow().date() 68 | start = ( 69 | datetime(today.year, today.month, today.day, tzinfo=tz.tzutc()) 70 | - timedelta(days=int(args.past_days)) 71 | - timedelta(hours=tz_off) 72 | ) 73 | end = datetime( 74 | today.year, today.month, today.day, tzinfo=tz.tzutc() 75 | ) + timedelta(days=1) 76 | 77 | start = start.strftime("%Y-%m-%dT%H:%M:%S") 78 | end = end.strftime("%Y-%m-%dT%H:%M:%S") 79 | 80 | user_stats = [] 81 | 82 | 83 | def getTTR(user): 84 | if user != None: 85 | print( 86 | f"Getting all activities in the last {args.past_days} days for {user['user_full_name']}..." 87 | ) 88 | else: 89 | print( 90 | f"Getting all activities in the last {args.past_days} days for {'All Users'}..." 
91 | ) 92 | 93 | has_more = True 94 | offset = 0 95 | seconds = 0 96 | seconds_inc = 0 97 | resp = None 98 | activities = [] 99 | 100 | while has_more: 101 | if user != None: 102 | resp = api.get( 103 | 'activity', 104 | params={ 105 | '_skip': offset, 106 | 'date_created__gte': start, 107 | 'date_created__lte': end, 108 | '_fields': '_type,id,date_created,lead_id,direction,user_id,duration', 109 | 'user_id': user['user_id'], 110 | }, 111 | ) 112 | else: 113 | resp = api.get( 114 | 'activity', 115 | params={ 116 | '_skip': offset, 117 | 'date_created__gte': start, 118 | 'date_created__lte': end, 119 | '_fields': '_type,id,date_created,lead_id,direction,user_id,duration', 120 | }, 121 | ) 122 | for activity in resp['data']: 123 | if ( 124 | activity['_type'] in ['Call', 'Email', 'SMS'] 125 | and activity['lead_id'] != None 126 | ): 127 | activity['date_created'] = ( 128 | activity['date_created'].split('+')[0].split('.')[0] 129 | ) 130 | activities.append(activity) 131 | print(offset) 132 | offset += len(resp['data']) 133 | has_more = resp['has_more'] 134 | if user == None: 135 | user = {} 136 | user['user_full_name'] = 'All Users' 137 | print(f"Getting TTR for {user['user_full_name']}...") 138 | 139 | responded_count = 0 140 | responded_count_with_not_responded_to_yet = 0 141 | total_time_to_respond_with_not_responded_to_yet = 0 142 | total_time_to_respond = 0 143 | 144 | inbound_activities = [ 145 | i 146 | for i in activities 147 | if ( 148 | (i['direction'] == 'incoming' or i['direction'] == 'inbound') 149 | and ( 150 | i['_type'] in ['SMS', 'Email'] 151 | or (i['_type'] == 'Call' and i['duration'] == 0) 152 | ) 153 | ) 154 | ] 155 | 156 | now = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S") 157 | 158 | for i in range(0, len(inbound_activities)): 159 | activities_for_this_lead = [ 160 | a 161 | for a in activities 162 | if a['lead_id'] == inbound_activities[i]['lead_id'] 163 | ] 164 | outbound_activities_for_this_lead = [ 165 | a 166 | for a in activities_for_this_lead 167 | if datetime.strptime( 168 | a['date_created'].split('.')[0], "%Y-%m-%dT%H:%M:%S" 169 | ) 170 | > datetime.strptime( 171 | inbound_activities[i]['date_created'].split('.')[0], 172 | "%Y-%m-%dT%H:%M:%S", 173 | ) 174 | and (a['direction'] == 'outbound' or a['direction'] == 'outgoing') 175 | ] 176 | if len(outbound_activities_for_this_lead) != 0: 177 | activity_after = outbound_activities_for_this_lead[ 178 | len(outbound_activities_for_this_lead) - 1 179 | ] 180 | diff = ( 181 | datetime.strptime( 182 | activity_after['date_created'].split('.')[0], 183 | "%Y-%m-%dT%H:%M:%S", 184 | ) 185 | - datetime.strptime( 186 | inbound_activities[i]['date_created'].split('.')[0], 187 | "%Y-%m-%dT%H:%M:%S", 188 | ) 189 | ).total_seconds() 190 | total_time_to_respond += diff 191 | total_time_to_respond_with_not_responded_to_yet += diff 192 | responded_count += 1 193 | responded_count_with_not_responded_to_yet += 1 194 | 195 | else: 196 | diff = ( 197 | datetime.strptime(now, "%Y-%m-%dT%H:%M:%S") 198 | - datetime.strptime( 199 | inbound_activities[i]['date_created'].split('.')[0], 200 | "%Y-%m-%dT%H:%M:%S", 201 | ) 202 | ).total_seconds() 203 | total_time_to_respond_with_not_responded_to_yet += diff 204 | responded_count_with_not_responded_to_yet += 1 205 | 206 | if responded_count != 0: 207 | seconds = int(float(total_time_to_respond) / float(responded_count)) 208 | 209 | if total_time_to_respond_with_not_responded_to_yet != 0: 210 | seconds_inc = int( 211 | float(total_time_to_respond_with_not_responded_to_yet) 212 | / 
213 |         )
214 | 
215 |     print(
216 |         f"Average Time to Respond To Leads (Only Leads Already Responded To): {pretty_time_delta(seconds)}"
217 |     )
218 |     print(
219 |         f"Average Time to Respond To Leads (Including Leads Not Responded To Yet): {pretty_time_delta(seconds_inc)}"
220 |     )
221 | 
222 |     user_stat = {
223 |         'Total # of SMS': len([i for i in activities if i['_type'] == 'SMS']),
224 |         'Total # of Emails': len(
225 |             [i for i in activities if i['_type'] == 'Email']
226 |         ),
227 |         'Total # of Calls': len(
228 |             [i for i in activities if i['_type'] == 'Call']
229 |         ),
230 |         'Total # of Inbound Communications': len(
231 |             [
232 |                 i
233 |                 for i in activities
234 |                 if (
235 |                     i['_type'] in ['SMS', 'Call', 'Email']
236 |                     and i['direction'] in ['inbound', 'incoming']
237 |                 )
238 |             ]
239 |         ),
240 |         'Total # of Outbound Communications': len(
241 |             [
242 |                 i
243 |                 for i in activities
244 |                 if (
245 |                     i['_type'] in ['SMS', 'Call', 'Email']
246 |                     and i['direction'] in ['outbound', 'outgoing']
247 |                 )
248 |             ]
249 |         ),
250 |         'Average Time to Respond To Leads (Only Leads Already Responded To)': seconds,
251 |         'Average Time to Respond To Leads (Only Leads Already Responded To) Formatted': pretty_time_delta(
252 |             seconds
253 |         ),
254 |         'Average Time to Respond To Leads (Including Leads Not Responded To Yet)': seconds_inc,
255 |         'Average Time to Respond To Leads (Including Leads Not Responded To Yet) Formatted': pretty_time_delta(
256 |             seconds_inc
257 |         ),
258 |         'User Name': user['user_full_name'],
259 |     }
260 | 
261 |     user_stats.append(user_stat)
262 | 
263 | 
264 | if args.user_counts:
265 |     for membership in org_memberships:
266 |         getTTR(membership)
267 | 
268 | if args.org_count:
269 |     getTTR(None)
270 | 
271 | f = open(
272 |     f'{org_name} Time to Respond Data Per User For The Past {args.past_days} days.csv',
273 |     'w',
274 |     newline='',
275 |     encoding='utf-8',
276 | )
277 | try:
278 |     keys = user_stats[0].keys()
279 |     ordered_keys = [
280 |         'User Name',
281 |         'Average Time to Respond To Leads (Only Leads Already Responded To) Formatted',
282 |         'Average Time to Respond To Leads (Including Leads Not Responded To Yet) Formatted',
283 |     ] + [
284 |         i
285 |         for i in keys
286 |         if i
287 |         not in [
288 |             'Average Time to Respond To Leads (Including Leads Not Responded To Yet) Formatted',
289 |             'User Name',
290 |             'Average Time to Respond To Leads (Only Leads Already Responded To) Formatted',
291 |         ]
292 |     ]
293 |     writer = csv.DictWriter(f, ordered_keys)
294 |     writer.writeheader()
295 |     writer.writerows(user_stats)
296 | finally:
297 |     f.close()
298 | 
--------------------------------------------------------------------------------
/scripts/update_opportunities.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import sys
3 | 
4 | from closeio_api import Client as CloseIO_API
5 | 
6 | parser = argparse.ArgumentParser(
7 |     description="Change all the opportunities for leads matching a given search query to a given status."
8 | )
9 | parser.add_argument('--api-key', '-k', required=True, help='API Key')
10 | parser.add_argument('--query', type=str, required=True, help='Search query.')
11 | parser.add_argument(
12 |     '--status', type=str, required=True, help='Label of the new status'
13 | )
14 | args = parser.parse_args()
15 | 
16 | # Should tell you how many leads are going to be affected
17 | api = CloseIO_API(args.api_key)
18 | 
19 | # Get the status_id
20 | org_id = api.get('api_key')['data'][0]['organization_id']
21 | statuses = api.get('organization/{0}'.format(org_id))['opportunity_statuses']
22 | new_status_id = [
23 |     st['id'] for st in statuses if st['label'].lower() == args.status.lower()
24 | ]
25 | if not new_status_id:
26 |     print(f'Status not found: {args.status}')
27 |     sys.exit(1)
28 | 
29 | new_status_id = new_status_id[0]
30 | 
31 | print(f'Gathering opportunities for {args.query}')
32 | 
33 | has_more = True
34 | offset = 0
35 | limit = 50
36 | opp_ids = []
37 | 
38 | while has_more:
39 |     resp = api.get(
40 |         'lead', params={'_skip': offset, '_limit': limit, 'query': args.query}
41 |     )
42 |     opp_ids.extend(
43 |         [opp['id'] for lead in resp['data'] for opp in lead['opportunities']]
44 |     )
45 |     has_more = resp['has_more']
46 |     offset += limit
47 | 
48 | ans = input(
49 |     '{0} opportunities found. Do you want to update all of them to {1}? (y/n): '.format(
50 |         len(opp_ids), args.status
51 |     )
52 | )
53 | if ans.lower() != 'y':
54 |     sys.exit(0)
55 | 
56 | print(f'Updating opportunities to {args.status}')
57 | 
58 | # Update opps
59 | for opp_id in opp_ids:
60 |     resp = api.put(
61 |         'opportunity/{0}'.format(opp_id), data={'status_id': new_status_id}
62 |     )
63 | 
64 | print('Done!')
65 | 
--------------------------------------------------------------------------------
/scripts/user_reassign.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import argparse
5 | import logging
6 | 
7 | from closeio_api import APIError, Client as CloseIO_API
8 | 
9 | parser = argparse.ArgumentParser(
10 |     description='Assigns tasks or opportunities from one user to another'
11 | )
12 | group_from = parser.add_mutually_exclusive_group(required=True)
13 | group_from.add_argument('--from-user-id', '-f', type=str, help='ID of the user to reassign from')
14 | group_from.add_argument('--from-user-email', type=str, help='Email of the user to reassign from')
15 | group_to = parser.add_mutually_exclusive_group(required=True)
16 | group_to.add_argument('--to-user-id', '-t', type=str, help='ID of the user to reassign to')
17 | group_to.add_argument('--to-user-email', type=str, help='Email of the user to reassign to')
18 | 
19 | parser.add_argument('--api-key', '-k', required=True, help='API key')
20 | parser.add_argument(
21 |     '--confirmed',
22 |     '-c',
23 |     action='store_true',
24 |     help='Without this flag, the script will do a dry run without actually updating any data.',
25 | )
26 | parser.add_argument(
27 |     '--continue-on-error',
28 |     '-s',
29 |     action='store_true',
30 |     help='Do not abort after first error',
31 | )
32 | group = parser.add_argument_group()
33 | group.add_argument(
34 |     '--tasks',
35 |     '-T',
36 |     action='store_true',
37 |     help='reassign only incomplete tasks',
38 | )
39 | group.add_argument(
40 |     '--all-tasks', action='store_true', help='reassign all tasks'
41 | )
42 | group.add_argument(
43 |     '--opportunities',
44 |     '-O',
45 |     action='store_true',
46 |     help='reassign only active opportunities',
47 | )
48 | group.add_argument(
49 |     '--all-opportunities',
50 |     action='store_true',
51 |     help='reassign all opportunities',
52 | )
53 | 
54 | args = parser.parse_args()
55 | 
56 | full_tasks = []
57 | full_opps = []
58 | 
59 | if not any(
60 |     [args.tasks, args.opportunities, args.all_tasks, args.all_opportunities]
61 | ):
62 |     parser.error("at least one option required")
63 | 
64 | log_format = "[%(asctime)s] %(levelname)s %(message)s"
65 | if not args.confirmed:
66 |     log_format = 'DRY RUN: ' + log_format
67 | logging.basicConfig(level=logging.INFO, format=log_format)
68 | logging.debug(f'parameters: {vars(args)}')
69 | 
70 | api = CloseIO_API(args.api_key)
71 | 
72 | emails_to_ids = {}
73 | if any([args.from_user_email, args.to_user_email]):
74 |     has_more = True
75 |     offset = 0
76 |     while has_more:
77 |         resp = api.get('user', params={'_skip': offset})
78 |         for user in resp['data']:
79 |             emails_to_ids[user['email']] = user['id']
80 |         offset += len(resp['data'])
81 |         has_more = resp['has_more']
82 | 
83 | logging.debug(emails_to_ids)
84 | 
85 | if args.from_user_email:
86 |     from_user_id = emails_to_ids[args.from_user_email]
87 | else:
88 |     # Raises an APIError if the given user_id does not exist
89 |     resp = api.get('user/' + args.from_user_id, params={'_fields': 'id,email'})
90 | 
91 |     from_user_id = resp['id']
92 |     emails_to_ids[resp['email']] = resp['id']
93 | 
94 | if args.to_user_email:
95 |     to_user_id = emails_to_ids[args.to_user_email]
96 | 
97 | else:
98 |     resp = api.get('user/' + args.to_user_id, params={'_fields': 'id,email'})
99 | 
100 |     to_user_id = resp['id']
101 |     emails_to_ids[resp['email']] = resp['id']
102 | 
103 | ids_to_emails = dict((v, k) for k, v in emails_to_ids.items())
104 | 
105 | logging.info(f'from user_id {from_user_id} ({ids_to_emails[from_user_id]})')
106 | logging.info(f'to user_id: {to_user_id} ({ids_to_emails[to_user_id]})')
107 | 
108 | assert from_user_id != to_user_id, 'from and to user IDs must differ'
109 | 
110 | opportunities_errors = 0
111 | tasks_errors = 0
112 | try:
113 |     # tasks (both counters initialized here so the final summary always has them)
114 |     updated_tasks = updated_opportunities = 0
115 |     if args.tasks or args.all_tasks:
116 |         has_more = True
117 |         offset = 0
118 |         while has_more:
119 |             payload = {
120 |                 'assigned_to': from_user_id,
121 |                 '_order_by': 'date_created',
122 |                 '_skip': offset,
123 |                 '_fields': 'id',
124 |             }
125 | 
126 |             if not args.all_tasks:
127 |                 payload['is_complete'] = False
128 | 
129 |             resp = api.get('task', params=payload)
130 | 
131 |             tasks = resp['data']
132 |             for task in tasks:
133 |                 if args.confirmed:
134 |                     full_tasks.append(task['id'])
135 |                 else:
136 |                     logging.info(f'updated {task["id"]}')
137 |                     updated_tasks += 1
138 |             offset += len(tasks)
139 |             has_more = resp['has_more']
140 | 
141 |         for task_id in full_tasks:
142 |             try:
143 |                 api.put('task/' + task_id, data={'assigned_to': to_user_id})
144 |                 logging.info(f'updated {task_id}')
145 |                 updated_tasks += 1
146 |             except APIError as e:
147 |                 tasks_errors += 1
148 |                 if not args.continue_on_error:
149 |                     raise e
150 |                 logging.error(f'task {task_id} skipped with error {str(e)}')
151 | 
152 |     # opportunities
153 |     updated_opportunities = 0
154 |     if args.opportunities or args.all_opportunities:
155 |         has_more = True
156 |         offset = 0
157 |         while has_more:
158 |             payload = {
159 |                 'user_id': from_user_id,
160 |                 '_order_by': 'date_created',
161 |                 '_skip': offset,
162 |                 '_fields': 'id',
163 |             }
164 | 
165 |             if not args.all_opportunities:
166 |                 payload['status_type'] = 'active'
167 | 
168 |             resp = api.get('opportunity', params=payload)
169 | 
170 |             opportunities = resp['data']
171 |             for opportunity in opportunities:
172 |                 if args.confirmed:
173 |                     full_opps.append(opportunity['id'])
174 |                 else:
175 |                     logging.info(f'updated {opportunity["id"]}')
176 |                     updated_opportunities += 1
177 |             offset += len(opportunities)
178 |             has_more = resp['has_more']
179 | 
180 |         for opp_id in full_opps:
181 |             try:
182 |                 api.put('opportunity/' + opp_id, data={'user_id': to_user_id})
183 |                 logging.info(f'updated {opp_id}')
184 |                 updated_opportunities += 1
185 |             except APIError as e:
186 |                 opportunities_errors += 1
187 |                 if not args.continue_on_error:
188 |                     raise e
189 |                 logging.error(
190 |                     f'opportunity {opp_id} skipped with error {str(e)}'
191 |                 )
192 | except APIError as e:
193 |     logging.error(f'stopped on error {str(e)}')
194 | 
195 | logging.info(
196 |     f'summary: updated tasks {updated_tasks}, updated opportunities {updated_opportunities}'
197 | )
198 | if opportunities_errors or tasks_errors:
199 |     logging.info(
200 |         f'summary: tasks errors: {tasks_errors}, opportunities errors {opportunities_errors}'
201 |     )
202 | 
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore=
3 |     # !!! make sure you have a comma at the end of each line EXCEPT the LAST one
4 |     # Indentation
5 |     #E121,E122,E123,E124,E126,E127,E128,E131,
6 |     # Comments should start with '# '
7 |     #E262,E265,E266,
8 |     # Module level import not at top of file
9 |     #E402,
10 |     # Line too long
11 |     #E501,
12 |     # Ambiguous variable name
13 |     #E741,
14 |     # https://pypi.org/project/flake8-future-import/
15 |     FI1,
16 |     # Missing docstrings
17 |     D1,
18 |     # One-line docstring should fit on one line with quotes.
19 |     # We ignore this because it's OK to buy yourself a few extra characters
20 |     # for the summary line even if the summary line is *the only* line.
21 |     #D200,
22 |     # 1 blank line required between summary line and description
23 |     #D205,
24 |     # Multi-line docstring summary should start at the first line.
25 |     # We ignore this because we agreed in #20553 that we want to put the
26 |     # summary line below """ for multi-line docstrings.
27 |     D212,
28 |     # First line should end with a period
29 |     #D400,
30 |     # First line should end with a period, question mark, or exclamation point.
31 |     # TODO We should fix this.
32 |     #D415,
33 |     # variable in function should be lowercase - we use CONSTANT_LIKE stuff in functions
34 |     #N806,
35 |     # This is not PEP8-compliant and conflicts with black
36 |     W503,
37 |     W504,
38 |     # This is not PEP8-compliant and conflicts with black
39 |     E203,
40 |     # Loop control variable 'x' not used within the loop body.
41 |     #B007,
42 |     # Do not call assert False
43 |     #B011
44 | exclude=venv
45 | #max-complexity=2
46 | banned-modules=
47 |     typing.Text = use str
48 | require-code=True
49 | 
50 | [isort]
51 | skip=venv
52 | known_tests=tests
53 | sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TESTS,LOCALFOLDER
54 | default_section=THIRDPARTY
55 | use_parentheses=true
56 | multi_line_output=3
57 | include_trailing_comma=True
58 | force_grid_wrap=0
59 | combine_as_imports=True
60 | line_length=79
61 | float_to_top=True
62 | 
63 | [mypy]
64 | python_version = 3.7
65 | ignore_missing_imports = True
66 | no_implicit_optional = True
67 | strict_equality = True
68 | follow_imports = skip
69 | warn_unreachable = True
70 | show_error_context = True
71 | pretty = True
72 | files = scripts
--------------------------------------------------------------------------------